/* RTL simplification functions for GNU compiler.
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "function.h"
#include "expr.h"
#include "toplev.h"
#include "output.h"
#include "ggc.h"
#include "target.h"

/* Simplification and canonicalization of RTL.  */

/* Much code operates on (low, high) pairs; the low value is an
   unsigned wide int, the high value a signed wide int.  We
   occasionally need to sign extend from low to high as if low were a
   signed wide int.  */
#define HWI_SIGN_EXTEND(low) \
 ((((HOST_WIDE_INT) low) < 0) ? ((HOST_WIDE_INT) -1) : ((HOST_WIDE_INT) 0))
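/* For instance, HWI_SIGN_EXTEND applied to a negative low word yields
   (HOST_WIDE_INT) -1 and applied to a non-negative one yields 0, i.e.
   exactly the high word that sign extension would produce.  */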

static rtx neg_const_int (enum machine_mode, const_rtx);
static bool plus_minus_operand_p (const_rtx);
static bool simplify_plus_minus_op_data_cmp (rtx, rtx);
static rtx simplify_plus_minus (enum rtx_code, enum machine_mode, rtx, rtx);
static rtx simplify_immed_subreg (enum machine_mode, rtx, enum machine_mode,
                                  unsigned int);
static rtx simplify_associative_operation (enum rtx_code, enum machine_mode,
                                           rtx, rtx);
static rtx simplify_relational_operation_1 (enum rtx_code, enum machine_mode,
                                            enum machine_mode, rtx, rtx);
static rtx simplify_unary_operation_1 (enum rtx_code, enum machine_mode, rtx);
static rtx simplify_binary_operation_1 (enum rtx_code, enum machine_mode,
                                        rtx, rtx, rtx, rtx);

/* Negate a CONST_INT rtx, truncating (because a conversion from a
   maximally negative number can overflow).  */
static rtx
neg_const_int (enum machine_mode mode, const_rtx i)
{
  return gen_int_mode (- INTVAL (i), mode);
}
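/* E.g. in QImode the negation of (const_int -128) is 128, which does
   not fit; gen_int_mode truncates it back to (const_int -128) instead
   of producing an out-of-range constant.  */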

/* Test whether expression, X, is an immediate constant that represents
   the most significant bit of machine mode MODE.  */

bool
mode_signbit_p (enum machine_mode mode, const_rtx x)
{
  unsigned HOST_WIDE_INT val;
  unsigned int width;

  if (GET_MODE_CLASS (mode) != MODE_INT)
    return false;

  width = GET_MODE_BITSIZE (mode);
  if (width == 0)
    return false;

  if (width <= HOST_BITS_PER_WIDE_INT
      && CONST_INT_P (x))
    val = INTVAL (x);
  else if (width <= 2 * HOST_BITS_PER_WIDE_INT
           && GET_CODE (x) == CONST_DOUBLE
           && CONST_DOUBLE_LOW (x) == 0)
    {
      val = CONST_DOUBLE_HIGH (x);
      width -= HOST_BITS_PER_WIDE_INT;
    }
  else
    return false;

  if (width < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << width) - 1;
  return val == ((unsigned HOST_WIDE_INT) 1 << (width - 1));
}
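/* For 32-bit SImode, for example, this holds only for the value
   0x80000000, which CONST_INTs store sign-extended, i.e. as
   (const_int -2147483648) on a 64-bit host.  */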

/* Make a binary operation by properly ordering the operands and
   seeing if the expression folds.  */

rtx
simplify_gen_binary (enum rtx_code code, enum machine_mode mode, rtx op0,
                     rtx op1)
{
  rtx tem;

  /* If this simplifies, do it.  */
  tem = simplify_binary_operation (code, mode, op0, op1);
  if (tem)
    return tem;

  /* Put complex operands first and constants second if commutative.  */
  if (GET_RTX_CLASS (code) == RTX_COMM_ARITH
      && swap_commutative_operands_p (op0, op1))
    tem = op0, op0 = op1, op1 = tem;

  return gen_rtx_fmt_ee (code, mode, op0, op1);
}
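/* E.g. simplify_gen_binary (PLUS, SImode, (const_int 4), (reg X))
   yields the canonical (plus (reg X) (const_int 4)): when neither
   operand folds, the constant is swapped into the second slot, which
   is the order the machine descriptions expect.  */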

/* If X is a MEM referencing the constant pool, return the real value.
   Otherwise return X.  */
rtx
avoid_constant_pool_reference (rtx x)
{
  rtx c, tmp, addr;
  enum machine_mode cmode;
  HOST_WIDE_INT offset = 0;

  switch (GET_CODE (x))
    {
    case MEM:
      break;

    case FLOAT_EXTEND:
      /* Handle float extensions of constant pool references.  */
      tmp = XEXP (x, 0);
      c = avoid_constant_pool_reference (tmp);
      if (c != tmp && GET_CODE (c) == CONST_DOUBLE)
        {
          REAL_VALUE_TYPE d;

          REAL_VALUE_FROM_CONST_DOUBLE (d, c);
          return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (x));
        }
      return x;

    default:
      return x;
    }

  if (GET_MODE (x) == BLKmode)
    return x;

  addr = XEXP (x, 0);

  /* Call target hook to avoid the effects of -fpic etc....  */
  addr = targetm.delegitimize_address (addr);

  /* Split the address into a base and integer offset.  */
  if (GET_CODE (addr) == CONST
      && GET_CODE (XEXP (addr, 0)) == PLUS
      && CONST_INT_P (XEXP (XEXP (addr, 0), 1)))
    {
      offset = INTVAL (XEXP (XEXP (addr, 0), 1));
      addr = XEXP (XEXP (addr, 0), 0);
    }

  if (GET_CODE (addr) == LO_SUM)
    addr = XEXP (addr, 1);

  /* If this is a constant pool reference, we can turn it into its
     constant and hope that simplifications happen.  */
  if (GET_CODE (addr) == SYMBOL_REF
      && CONSTANT_POOL_ADDRESS_P (addr))
    {
      c = get_pool_constant (addr);
      cmode = get_pool_mode (addr);

      /* If we're accessing the constant in a different mode than it was
         originally stored, attempt to fix that up via subreg simplifications.
         If that fails we have no choice but to return the original memory.  */
      if (offset != 0 || cmode != GET_MODE (x))
        {
          rtx tem = simplify_subreg (GET_MODE (x), c, cmode, offset);
          if (tem && CONSTANT_P (tem))
            return tem;
        }
      else
        return c;
    }

  return x;
}
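/* For example, a (mem (symbol_ref)) whose symbol addresses a constant
   pool entry holding the double 1.5 comes back as that CONST_DOUBLE,
   so later folding sees the value rather than the memory load.  */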

/* Simplify a MEM based on its attributes.  This is the default
   delegitimize_address target hook, and it's recommended that every
   overrider call it.  */

rtx
delegitimize_mem_from_attrs (rtx x)
{
  /* MEMs without MEM_OFFSETs may have been offset, so we can't just
     use their base addresses as equivalent.  */
  if (MEM_P (x)
      && MEM_EXPR (x)
      && MEM_OFFSET (x))
    {
      tree decl = MEM_EXPR (x);
      enum machine_mode mode = GET_MODE (x);
      HOST_WIDE_INT offset = 0;

      switch (TREE_CODE (decl))
        {
        default:
          decl = NULL;
          break;

        case VAR_DECL:
          break;

        case ARRAY_REF:
        case ARRAY_RANGE_REF:
        case COMPONENT_REF:
        case BIT_FIELD_REF:
        case REALPART_EXPR:
        case IMAGPART_EXPR:
        case VIEW_CONVERT_EXPR:
          {
            HOST_WIDE_INT bitsize, bitpos;
            tree toffset;
            int unsignedp = 0, volatilep = 0;

            decl = get_inner_reference (decl, &bitsize, &bitpos, &toffset,
                                        &mode, &unsignedp, &volatilep, false);
            if (bitsize != GET_MODE_BITSIZE (mode)
                || (bitpos % BITS_PER_UNIT)
                || (toffset && !host_integerp (toffset, 0)))
              decl = NULL;
            else
              {
                offset += bitpos / BITS_PER_UNIT;
                if (toffset)
                  offset += TREE_INT_CST_LOW (toffset);
              }
            break;
          }
        }

      if (decl
          && mode == GET_MODE (x)
          && TREE_CODE (decl) == VAR_DECL
          && (TREE_STATIC (decl)
              || DECL_THREAD_LOCAL_P (decl))
          && DECL_RTL_SET_P (decl)
          && MEM_P (DECL_RTL (decl)))
        {
          rtx newx;

          offset += INTVAL (MEM_OFFSET (x));

          newx = DECL_RTL (decl);

          if (MEM_P (newx))
            {
              rtx n = XEXP (newx, 0), o = XEXP (x, 0);

              /* Avoid creating a new MEM needlessly if we already had
                 the same address.  We do if there's no OFFSET and the
                 old address X is identical to NEWX, or if X is of the
                 form (plus NEWX OFFSET), or the NEWX is of the form
                 (plus Y (const_int Z)) and X is that with the offset
                 added: (plus Y (const_int Z+OFFSET)).  */
              if (!((offset == 0
                     || (GET_CODE (o) == PLUS
                         && GET_CODE (XEXP (o, 1)) == CONST_INT
                         && (offset == INTVAL (XEXP (o, 1))
                             || (GET_CODE (n) == PLUS
                                 && GET_CODE (XEXP (n, 1)) == CONST_INT
                                 && (INTVAL (XEXP (n, 1)) + offset
                                     == INTVAL (XEXP (o, 1)))
                                 && (n = XEXP (n, 0))))
                         && (o = XEXP (o, 0))))
                    && rtx_equal_p (o, n)))
                x = adjust_address_nv (newx, mode, offset);
            }
          else if (GET_MODE (x) == GET_MODE (newx)
                   && offset == 0)
            x = newx;
        }
    }

  return x;
}

/* Make a unary operation by first seeing if it folds and otherwise making
   the specified operation.  */

rtx
simplify_gen_unary (enum rtx_code code, enum machine_mode mode, rtx op,
                    enum machine_mode op_mode)
{
  rtx tem;

  /* If this simplifies, use it.  */
  if ((tem = simplify_unary_operation (code, mode, op, op_mode)) != 0)
    return tem;

  return gen_rtx_fmt_e (code, mode, op);
}

/* Likewise for ternary operations.  */

rtx
simplify_gen_ternary (enum rtx_code code, enum machine_mode mode,
                      enum machine_mode op0_mode, rtx op0, rtx op1, rtx op2)
{
  rtx tem;

  /* If this simplifies, use it.  */
  if (0 != (tem = simplify_ternary_operation (code, mode, op0_mode,
                                              op0, op1, op2)))
    return tem;

  return gen_rtx_fmt_eee (code, mode, op0, op1, op2);
}

/* Likewise, for relational operations.
   CMP_MODE specifies mode comparison is done in.  */

rtx
simplify_gen_relational (enum rtx_code code, enum machine_mode mode,
                         enum machine_mode cmp_mode, rtx op0, rtx op1)
{
  rtx tem;

  if (0 != (tem = simplify_relational_operation (code, mode, cmp_mode,
                                                 op0, op1)))
    return tem;

  return gen_rtx_fmt_ee (code, mode, op0, op1);
}

/* If FN is NULL, replace all occurrences of OLD_RTX in X with copy_rtx (DATA)
   and simplify the result.  If FN is non-NULL, call this callback on each
   X; if it returns non-NULL, replace X with its return value and simplify
   the result.  */

rtx
simplify_replace_fn_rtx (rtx x, const_rtx old_rtx,
                         rtx (*fn) (rtx, const_rtx, void *), void *data)
{
  enum rtx_code code = GET_CODE (x);
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode op_mode;
  const char *fmt;
  rtx op0, op1, op2, newx, op;
  rtvec vec, newvec;
  int i, j;

  if (__builtin_expect (fn != NULL, 0))
    {
      newx = fn (x, old_rtx, data);
      if (newx)
        return newx;
    }
  else if (rtx_equal_p (x, old_rtx))
    return copy_rtx ((rtx) data);

  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      op0 = XEXP (x, 0);
      op_mode = GET_MODE (op0);
      op0 = simplify_replace_fn_rtx (op0, old_rtx, fn, data);
      if (op0 == XEXP (x, 0))
        return x;
      return simplify_gen_unary (code, mode, op0, op_mode);

    case RTX_BIN_ARITH:
    case RTX_COMM_ARITH:
      op0 = simplify_replace_fn_rtx (XEXP (x, 0), old_rtx, fn, data);
      op1 = simplify_replace_fn_rtx (XEXP (x, 1), old_rtx, fn, data);
      if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
        return x;
      return simplify_gen_binary (code, mode, op0, op1);

    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      op0 = XEXP (x, 0);
      op1 = XEXP (x, 1);
      op_mode = GET_MODE (op0) != VOIDmode ? GET_MODE (op0) : GET_MODE (op1);
      op0 = simplify_replace_fn_rtx (op0, old_rtx, fn, data);
      op1 = simplify_replace_fn_rtx (op1, old_rtx, fn, data);
      if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
        return x;
      return simplify_gen_relational (code, mode, op_mode, op0, op1);

    case RTX_TERNARY:
    case RTX_BITFIELD_OPS:
      op0 = XEXP (x, 0);
      op_mode = GET_MODE (op0);
      op0 = simplify_replace_fn_rtx (op0, old_rtx, fn, data);
      op1 = simplify_replace_fn_rtx (XEXP (x, 1), old_rtx, fn, data);
      op2 = simplify_replace_fn_rtx (XEXP (x, 2), old_rtx, fn, data);
      if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1) && op2 == XEXP (x, 2))
        return x;
      if (op_mode == VOIDmode)
        op_mode = GET_MODE (op0);
      return simplify_gen_ternary (code, mode, op_mode, op0, op1, op2);

    case RTX_EXTRA:
      if (code == SUBREG)
        {
          op0 = simplify_replace_fn_rtx (SUBREG_REG (x), old_rtx, fn, data);
          if (op0 == SUBREG_REG (x))
            return x;
          op0 = simplify_gen_subreg (GET_MODE (x), op0,
                                     GET_MODE (SUBREG_REG (x)),
                                     SUBREG_BYTE (x));
          return op0 ? op0 : x;
        }
      break;

    case RTX_OBJ:
      if (code == MEM)
        {
          op0 = simplify_replace_fn_rtx (XEXP (x, 0), old_rtx, fn, data);
          if (op0 == XEXP (x, 0))
            return x;
          return replace_equiv_address_nv (x, op0);
        }
      else if (code == LO_SUM)
        {
          op0 = simplify_replace_fn_rtx (XEXP (x, 0), old_rtx, fn, data);
          op1 = simplify_replace_fn_rtx (XEXP (x, 1), old_rtx, fn, data);

          /* (lo_sum (high x) x) -> x  */
          if (GET_CODE (op0) == HIGH && rtx_equal_p (XEXP (op0, 0), op1))
            return op1;

          if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
            return x;
          return gen_rtx_LO_SUM (mode, op0, op1);
        }
      break;

    default:
      break;
    }

  newx = x;
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; fmt[i]; i++)
    switch (fmt[i])
      {
      case 'E':
        vec = XVEC (x, i);
        newvec = XVEC (newx, i);
        for (j = 0; j < GET_NUM_ELEM (vec); j++)
          {
            op = simplify_replace_fn_rtx (RTVEC_ELT (vec, j),
                                          old_rtx, fn, data);
            if (op != RTVEC_ELT (vec, j))
              {
                if (newvec == vec)
                  {
                    newvec = shallow_copy_rtvec (vec);
                    if (x == newx)
                      newx = shallow_copy_rtx (x);
                    XVEC (newx, i) = newvec;
                  }
                RTVEC_ELT (newvec, j) = op;
              }
          }
        break;

      case 'e':
        if (XEXP (x, i))
          {
            op = simplify_replace_fn_rtx (XEXP (x, i), old_rtx, fn, data);
            if (op != XEXP (x, i))
              {
                if (x == newx)
                  newx = shallow_copy_rtx (x);
                XEXP (newx, i) = op;
              }
          }
        break;
      }
  return newx;
}

/* Replace all occurrences of OLD_RTX in X with NEW_RTX and try to simplify the
   resulting RTX.  Return a new RTX which is as simplified as possible.  */

rtx
simplify_replace_rtx (rtx x, const_rtx old_rtx, rtx new_rtx)
{
  return simplify_replace_fn_rtx (x, old_rtx, 0, new_rtx);
}
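/* E.g. replacing (reg A) with (const_int 0) in (plus (reg A) (reg B))
   first gives (plus (const_int 0) (reg B)), which the simplifiers then
   reduce to just (reg B).  */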

/* Try to simplify a unary operation CODE whose output mode is to be
   MODE with input operand OP whose mode was originally OP_MODE.
   Return zero if no simplification can be made.  */
rtx
simplify_unary_operation (enum rtx_code code, enum machine_mode mode,
                          rtx op, enum machine_mode op_mode)
{
  rtx trueop, tem;

  trueop = avoid_constant_pool_reference (op);

  tem = simplify_const_unary_operation (code, mode, trueop, op_mode);
  if (tem)
    return tem;

  return simplify_unary_operation_1 (code, mode, op);
}
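/* Constant folding is attempted first, so e.g. (neg:SI (const_int 1))
   is folded to (const_int -1) here; the algebraic rules in
   simplify_unary_operation_1 are only consulted when folding fails.  */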
529
 
530
/* Perform some simplifications we can do even if the operands
531
   aren't constant.  */
532
static rtx
533
simplify_unary_operation_1 (enum rtx_code code, enum machine_mode mode, rtx op)
534
{
535
  enum rtx_code reversed;
536
  rtx temp;
537
 
538
  switch (code)
539
    {
540
    case NOT:
541
      /* (not (not X)) == X.  */
542
      if (GET_CODE (op) == NOT)
543
        return XEXP (op, 0);
544
 
545
      /* (not (eq X Y)) == (ne X Y), etc. if BImode or the result of the
546
         comparison is all ones.   */
547
      if (COMPARISON_P (op)
548
          && (mode == BImode || STORE_FLAG_VALUE == -1)
549
          && ((reversed = reversed_comparison_code (op, NULL_RTX)) != UNKNOWN))
550
        return simplify_gen_relational (reversed, mode, VOIDmode,
551
                                        XEXP (op, 0), XEXP (op, 1));
552
 
553
      /* (not (plus X -1)) can become (neg X).  */
554
      if (GET_CODE (op) == PLUS
555
          && XEXP (op, 1) == constm1_rtx)
556
        return simplify_gen_unary (NEG, mode, XEXP (op, 0), mode);
557
 
558
      /* Similarly, (not (neg X)) is (plus X -1).  */
559
      if (GET_CODE (op) == NEG)
560
        return plus_constant (XEXP (op, 0), -1);
561
 
562
      /* (not (xor X C)) for C constant is (xor X D) with D = ~C.  */
563
      if (GET_CODE (op) == XOR
564
          && CONST_INT_P (XEXP (op, 1))
565
          && (temp = simplify_unary_operation (NOT, mode,
566
                                               XEXP (op, 1), mode)) != 0)
567
        return simplify_gen_binary (XOR, mode, XEXP (op, 0), temp);
568
 
569
      /* (not (plus X C)) for signbit C is (xor X D) with D = ~C.  */
570
      if (GET_CODE (op) == PLUS
571
          && CONST_INT_P (XEXP (op, 1))
572
          && mode_signbit_p (mode, XEXP (op, 1))
573
          && (temp = simplify_unary_operation (NOT, mode,
574
                                               XEXP (op, 1), mode)) != 0)
575
        return simplify_gen_binary (XOR, mode, XEXP (op, 0), temp);
576
 
577
 
578
      /* (not (ashift 1 X)) is (rotate ~1 X).  We used to do this for
579
         operands other than 1, but that is not valid.  We could do a
580
         similar simplification for (not (lshiftrt C X)) where C is
581
         just the sign bit, but this doesn't seem common enough to
582
         bother with.  */
583
      if (GET_CODE (op) == ASHIFT
584
          && XEXP (op, 0) == const1_rtx)
585
        {
586
          temp = simplify_gen_unary (NOT, mode, const1_rtx, mode);
587
          return simplify_gen_binary (ROTATE, mode, temp, XEXP (op, 1));
588
        }
589
 
590
      /* (not (ashiftrt foo C)) where C is the number of bits in FOO
591
         minus 1 is (ge foo (const_int 0)) if STORE_FLAG_VALUE is -1,
592
         so we can perform the above simplification.  */
593
 
594
      if (STORE_FLAG_VALUE == -1
595
          && GET_CODE (op) == ASHIFTRT
596
          && GET_CODE (XEXP (op, 1))
597
          && INTVAL (XEXP (op, 1)) == GET_MODE_BITSIZE (mode) - 1)
598
        return simplify_gen_relational (GE, mode, VOIDmode,
599
                                        XEXP (op, 0), const0_rtx);
600
 
601
 
602
      if (GET_CODE (op) == SUBREG
603
          && subreg_lowpart_p (op)
604
          && (GET_MODE_SIZE (GET_MODE (op))
605
              < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op))))
606
          && GET_CODE (SUBREG_REG (op)) == ASHIFT
607
          && XEXP (SUBREG_REG (op), 0) == const1_rtx)
608
        {
609
          enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op));
610
          rtx x;
611
 
612
          x = gen_rtx_ROTATE (inner_mode,
613
                              simplify_gen_unary (NOT, inner_mode, const1_rtx,
614
                                                  inner_mode),
615
                              XEXP (SUBREG_REG (op), 1));
616
          return rtl_hooks.gen_lowpart_no_emit (mode, x);
617
        }
618
 
619
      /* Apply De Morgan's laws to reduce number of patterns for machines
620
         with negating logical insns (and-not, nand, etc.).  If result has
621
         only one NOT, put it first, since that is how the patterns are
622
         coded.  */
623
 
624
      if (GET_CODE (op) == IOR || GET_CODE (op) == AND)
625
        {
626
          rtx in1 = XEXP (op, 0), in2 = XEXP (op, 1);
627
          enum machine_mode op_mode;
628
 
629
          op_mode = GET_MODE (in1);
630
          in1 = simplify_gen_unary (NOT, op_mode, in1, op_mode);
631
 
632
          op_mode = GET_MODE (in2);
633
          if (op_mode == VOIDmode)
634
            op_mode = mode;
635
          in2 = simplify_gen_unary (NOT, op_mode, in2, op_mode);
636
 
637
          if (GET_CODE (in2) == NOT && GET_CODE (in1) != NOT)
638
            {
639
              rtx tem = in2;
640
              in2 = in1; in1 = tem;
641
            }
642
 
643
          return gen_rtx_fmt_ee (GET_CODE (op) == IOR ? AND : IOR,
644
                                 mode, in1, in2);
645
        }
646
      break;
647
 
648
    case NEG:
649
      /* (neg (neg X)) == X.  */
650
      if (GET_CODE (op) == NEG)
651
        return XEXP (op, 0);
652
 
653
      /* (neg (plus X 1)) can become (not X).  */
654
      if (GET_CODE (op) == PLUS
655
          && XEXP (op, 1) == const1_rtx)
656
        return simplify_gen_unary (NOT, mode, XEXP (op, 0), mode);
657
 
658
      /* Similarly, (neg (not X)) is (plus X 1).  */
659
      if (GET_CODE (op) == NOT)
660
        return plus_constant (XEXP (op, 0), 1);
661
 
662
      /* (neg (minus X Y)) can become (minus Y X).  This transformation
663
         isn't safe for modes with signed zeros, since if X and Y are
664
         both +0, (minus Y X) is the same as (minus X Y).  If the
665
         rounding mode is towards +infinity (or -infinity) then the two
666
         expressions will be rounded differently.  */
667
      if (GET_CODE (op) == MINUS
668
          && !HONOR_SIGNED_ZEROS (mode)
669
          && !HONOR_SIGN_DEPENDENT_ROUNDING (mode))
670
        return simplify_gen_binary (MINUS, mode, XEXP (op, 1), XEXP (op, 0));
671
 
672
      if (GET_CODE (op) == PLUS
673
          && !HONOR_SIGNED_ZEROS (mode)
674
          && !HONOR_SIGN_DEPENDENT_ROUNDING (mode))
675
        {
676
          /* (neg (plus A C)) is simplified to (minus -C A).  */
677
          if (CONST_INT_P (XEXP (op, 1))
678
              || GET_CODE (XEXP (op, 1)) == CONST_DOUBLE)
679
            {
680
              temp = simplify_unary_operation (NEG, mode, XEXP (op, 1), mode);
681
              if (temp)
682
                return simplify_gen_binary (MINUS, mode, temp, XEXP (op, 0));
683
            }
684
 
685
          /* (neg (plus A B)) is canonicalized to (minus (neg A) B).  */
686
          temp = simplify_gen_unary (NEG, mode, XEXP (op, 0), mode);
687
          return simplify_gen_binary (MINUS, mode, temp, XEXP (op, 1));
688
        }
689
 
690
      /* (neg (mult A B)) becomes (mult (neg A) B).
691
         This works even for floating-point values.  */
692
      if (GET_CODE (op) == MULT
693
          && !HONOR_SIGN_DEPENDENT_ROUNDING (mode))
694
        {
695
          temp = simplify_gen_unary (NEG, mode, XEXP (op, 0), mode);
696
          return simplify_gen_binary (MULT, mode, temp, XEXP (op, 1));
697
        }
698
 
699
      /* NEG commutes with ASHIFT since it is multiplication.  Only do
700
         this if we can then eliminate the NEG (e.g., if the operand
701
         is a constant).  */
702
      if (GET_CODE (op) == ASHIFT)
703
        {
704
          temp = simplify_unary_operation (NEG, mode, XEXP (op, 0), mode);
705
          if (temp)
706
            return simplify_gen_binary (ASHIFT, mode, temp, XEXP (op, 1));
707
        }
708
 
709
      /* (neg (ashiftrt X C)) can be replaced by (lshiftrt X C) when
710
         C is equal to the width of MODE minus 1.  */
711
      if (GET_CODE (op) == ASHIFTRT
712
          && CONST_INT_P (XEXP (op, 1))
713
          && INTVAL (XEXP (op, 1)) == GET_MODE_BITSIZE (mode) - 1)
714
        return simplify_gen_binary (LSHIFTRT, mode,
715
                                    XEXP (op, 0), XEXP (op, 1));
716
 
717
      /* (neg (lshiftrt X C)) can be replaced by (ashiftrt X C) when
718
         C is equal to the width of MODE minus 1.  */
719
      if (GET_CODE (op) == LSHIFTRT
720
          && CONST_INT_P (XEXP (op, 1))
721
          && INTVAL (XEXP (op, 1)) == GET_MODE_BITSIZE (mode) - 1)
722
        return simplify_gen_binary (ASHIFTRT, mode,
723
                                    XEXP (op, 0), XEXP (op, 1));
724
 
725
      /* (neg (xor A 1)) is (plus A -1) if A is known to be either 0 or 1.  */
726
      if (GET_CODE (op) == XOR
727
          && XEXP (op, 1) == const1_rtx
728
          && nonzero_bits (XEXP (op, 0), mode) == 1)
729
        return plus_constant (XEXP (op, 0), -1);
730
 
731
      /* (neg (lt x 0)) is (ashiftrt X C) if STORE_FLAG_VALUE is 1.  */
732
      /* (neg (lt x 0)) is (lshiftrt X C) if STORE_FLAG_VALUE is -1.  */
733
      if (GET_CODE (op) == LT
734
          && XEXP (op, 1) == const0_rtx
735
          && SCALAR_INT_MODE_P (GET_MODE (XEXP (op, 0))))
736
        {
737
          enum machine_mode inner = GET_MODE (XEXP (op, 0));
738
          int isize = GET_MODE_BITSIZE (inner);
739
          if (STORE_FLAG_VALUE == 1)
740
            {
741
              temp = simplify_gen_binary (ASHIFTRT, inner, XEXP (op, 0),
742
                                          GEN_INT (isize - 1));
743
              if (mode == inner)
744
                return temp;
745
              if (GET_MODE_BITSIZE (mode) > isize)
746
                return simplify_gen_unary (SIGN_EXTEND, mode, temp, inner);
747
              return simplify_gen_unary (TRUNCATE, mode, temp, inner);
748
            }
749
          else if (STORE_FLAG_VALUE == -1)
750
            {
751
              temp = simplify_gen_binary (LSHIFTRT, inner, XEXP (op, 0),
752
                                          GEN_INT (isize - 1));
753
              if (mode == inner)
754
                return temp;
755
              if (GET_MODE_BITSIZE (mode) > isize)
756
                return simplify_gen_unary (ZERO_EXTEND, mode, temp, inner);
757
              return simplify_gen_unary (TRUNCATE, mode, temp, inner);
758
            }
759
        }
760
      break;
761
 
762
    case TRUNCATE:
763
      /* We can't handle truncation to a partial integer mode here
764
         because we don't know the real bitsize of the partial
765
         integer mode.  */
766
      if (GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
767
        break;
768
 
769
      /* (truncate:SI ({sign,zero}_extend:DI foo:SI)) == foo:SI.  */
770
      if ((GET_CODE (op) == SIGN_EXTEND
771
           || GET_CODE (op) == ZERO_EXTEND)
772
          && GET_MODE (XEXP (op, 0)) == mode)
773
        return XEXP (op, 0);
774
 
775
      /* (truncate:SI (OP:DI ({sign,zero}_extend:DI foo:SI))) is
776
         (OP:SI foo:SI) if OP is NEG or ABS.  */
777
      if ((GET_CODE (op) == ABS
778
           || GET_CODE (op) == NEG)
779
          && (GET_CODE (XEXP (op, 0)) == SIGN_EXTEND
780
              || GET_CODE (XEXP (op, 0)) == ZERO_EXTEND)
781
          && GET_MODE (XEXP (XEXP (op, 0), 0)) == mode)
782
        return simplify_gen_unary (GET_CODE (op), mode,
783
                                   XEXP (XEXP (op, 0), 0), mode);
784
 
785
      /* (truncate:A (subreg:B (truncate:C X) 0)) is
786
         (truncate:A X).  */
787
      if (GET_CODE (op) == SUBREG
788
          && GET_CODE (SUBREG_REG (op)) == TRUNCATE
789
          && subreg_lowpart_p (op))
790
        return simplify_gen_unary (TRUNCATE, mode, XEXP (SUBREG_REG (op), 0),
791
                                   GET_MODE (XEXP (SUBREG_REG (op), 0)));
792
 
793
      /* If we know that the value is already truncated, we can
794
         replace the TRUNCATE with a SUBREG.  Note that this is also
795
         valid if TRULY_NOOP_TRUNCATION is false for the corresponding
796
         modes we just have to apply a different definition for
797
         truncation.  But don't do this for an (LSHIFTRT (MULT ...))
798
         since this will cause problems with the umulXi3_highpart
799
         patterns.  */
800
      if ((TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
801
                                 GET_MODE_BITSIZE (GET_MODE (op)))
802
           ? (num_sign_bit_copies (op, GET_MODE (op))
803
              > (unsigned int) (GET_MODE_BITSIZE (GET_MODE (op))
804
                                - GET_MODE_BITSIZE (mode)))
805
           : truncated_to_mode (mode, op))
806
          && ! (GET_CODE (op) == LSHIFTRT
807
                && GET_CODE (XEXP (op, 0)) == MULT))
808
        return rtl_hooks.gen_lowpart_no_emit (mode, op);
809
 
810
      /* A truncate of a comparison can be replaced with a subreg if
811
         STORE_FLAG_VALUE permits.  This is like the previous test,
812
         but it works even if the comparison is done in a mode larger
813
         than HOST_BITS_PER_WIDE_INT.  */
814
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
815
          && COMPARISON_P (op)
816
          && ((HOST_WIDE_INT) STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0)
817
        return rtl_hooks.gen_lowpart_no_emit (mode, op);
818
      break;
819
 
820
    case FLOAT_TRUNCATE:
821
      if (DECIMAL_FLOAT_MODE_P (mode))
822
        break;
823
 
824
      /* (float_truncate:SF (float_extend:DF foo:SF)) = foo:SF.  */
825
      if (GET_CODE (op) == FLOAT_EXTEND
826
          && GET_MODE (XEXP (op, 0)) == mode)
827
        return XEXP (op, 0);
828
 
829
      /* (float_truncate:SF (float_truncate:DF foo:XF))
830
         = (float_truncate:SF foo:XF).
831
         This may eliminate double rounding, so it is unsafe.
832
 
833
         (float_truncate:SF (float_extend:XF foo:DF))
834
         = (float_truncate:SF foo:DF).
835
 
836
         (float_truncate:DF (float_extend:XF foo:SF))
837
         = (float_extend:SF foo:DF).  */
838
      if ((GET_CODE (op) == FLOAT_TRUNCATE
839
           && flag_unsafe_math_optimizations)
840
          || GET_CODE (op) == FLOAT_EXTEND)
841
        return simplify_gen_unary (GET_MODE_SIZE (GET_MODE (XEXP (op,
842
                                                            0)))
843
                                   > GET_MODE_SIZE (mode)
844
                                   ? FLOAT_TRUNCATE : FLOAT_EXTEND,
845
                                   mode,
846
                                   XEXP (op, 0), mode);
847
 
848
      /*  (float_truncate (float x)) is (float x)  */
849
      if (GET_CODE (op) == FLOAT
850
          && (flag_unsafe_math_optimizations
851
              || (SCALAR_FLOAT_MODE_P (GET_MODE (op))
852
                  && ((unsigned)significand_size (GET_MODE (op))
853
                      >= (GET_MODE_BITSIZE (GET_MODE (XEXP (op, 0)))
854
                          - num_sign_bit_copies (XEXP (op, 0),
855
                                                 GET_MODE (XEXP (op, 0))))))))
856
        return simplify_gen_unary (FLOAT, mode,
857
                                   XEXP (op, 0),
858
                                   GET_MODE (XEXP (op, 0)));
859
 
860
      /* (float_truncate:SF (OP:DF (float_extend:DF foo:sf))) is
861
         (OP:SF foo:SF) if OP is NEG or ABS.  */
862
      if ((GET_CODE (op) == ABS
863
           || GET_CODE (op) == NEG)
864
          && GET_CODE (XEXP (op, 0)) == FLOAT_EXTEND
865
          && GET_MODE (XEXP (XEXP (op, 0), 0)) == mode)
866
        return simplify_gen_unary (GET_CODE (op), mode,
867
                                   XEXP (XEXP (op, 0), 0), mode);
868
 
869
      /* (float_truncate:SF (subreg:DF (float_truncate:SF X) 0))
870
         is (float_truncate:SF x).  */
871
      if (GET_CODE (op) == SUBREG
872
          && subreg_lowpart_p (op)
873
          && GET_CODE (SUBREG_REG (op)) == FLOAT_TRUNCATE)
874
        return SUBREG_REG (op);
875
      break;
876
 
877
    case FLOAT_EXTEND:
878
      if (DECIMAL_FLOAT_MODE_P (mode))
879
        break;
880
 
881
      /*  (float_extend (float_extend x)) is (float_extend x)
882
 
883
          (float_extend (float x)) is (float x) assuming that double
884
          rounding can't happen.
885
          */
886
      if (GET_CODE (op) == FLOAT_EXTEND
887
          || (GET_CODE (op) == FLOAT
888
              && SCALAR_FLOAT_MODE_P (GET_MODE (op))
889
              && ((unsigned)significand_size (GET_MODE (op))
890
                  >= (GET_MODE_BITSIZE (GET_MODE (XEXP (op, 0)))
891
                      - num_sign_bit_copies (XEXP (op, 0),
892
                                             GET_MODE (XEXP (op, 0)))))))
893
        return simplify_gen_unary (GET_CODE (op), mode,
894
                                   XEXP (op, 0),
895
                                   GET_MODE (XEXP (op, 0)));
896
 
897
      break;
898
 
899
    case ABS:
900
      /* (abs (neg <foo>)) -> (abs <foo>) */
901
      if (GET_CODE (op) == NEG)
902
        return simplify_gen_unary (ABS, mode, XEXP (op, 0),
903
                                   GET_MODE (XEXP (op, 0)));
904
 
905
      /* If the mode of the operand is VOIDmode (i.e. if it is ASM_OPERANDS),
906
         do nothing.  */
907
      if (GET_MODE (op) == VOIDmode)
908
        break;
909
 
910
      /* If operand is something known to be positive, ignore the ABS.  */
911
      if (GET_CODE (op) == FFS || GET_CODE (op) == ABS
912
          || ((GET_MODE_BITSIZE (GET_MODE (op))
913
               <= HOST_BITS_PER_WIDE_INT)
914
              && ((nonzero_bits (op, GET_MODE (op))
915
                   & ((HOST_WIDE_INT) 1
916
                      << (GET_MODE_BITSIZE (GET_MODE (op)) - 1)))
917
                  == 0)))
918
        return op;
919
 
920
      /* If operand is known to be only -1 or 0, convert ABS to NEG.  */
921
      if (num_sign_bit_copies (op, mode) == GET_MODE_BITSIZE (mode))
922
        return gen_rtx_NEG (mode, op);
923
 
924
      break;
925
 
926
    case FFS:
927
      /* (ffs (*_extend <X>)) = (ffs <X>) */
928
      if (GET_CODE (op) == SIGN_EXTEND
929
          || GET_CODE (op) == ZERO_EXTEND)
930
        return simplify_gen_unary (FFS, mode, XEXP (op, 0),
931
                                   GET_MODE (XEXP (op, 0)));
932
      break;
933
 
934
    case POPCOUNT:
935
      switch (GET_CODE (op))
936
        {
937
        case BSWAP:
938
        case ZERO_EXTEND:
939
          /* (popcount (zero_extend <X>)) = (popcount <X>) */
940
          return simplify_gen_unary (POPCOUNT, mode, XEXP (op, 0),
941
                                     GET_MODE (XEXP (op, 0)));
942
 
943
        case ROTATE:
944
        case ROTATERT:
945
          /* Rotations don't affect popcount.  */
946
          if (!side_effects_p (XEXP (op, 1)))
947
            return simplify_gen_unary (POPCOUNT, mode, XEXP (op, 0),
948
                                       GET_MODE (XEXP (op, 0)));
949
          break;
950
 
951
        default:
952
          break;
953
        }
954
      break;
955
 
956
    case PARITY:
957
      switch (GET_CODE (op))
958
        {
959
        case NOT:
960
        case BSWAP:
961
        case ZERO_EXTEND:
962
        case SIGN_EXTEND:
963
          return simplify_gen_unary (PARITY, mode, XEXP (op, 0),
964
                                     GET_MODE (XEXP (op, 0)));
965
 
966
        case ROTATE:
967
        case ROTATERT:
968
          /* Rotations don't affect parity.  */
969
          if (!side_effects_p (XEXP (op, 1)))
970
            return simplify_gen_unary (PARITY, mode, XEXP (op, 0),
971
                                       GET_MODE (XEXP (op, 0)));
972
          break;
973
 
974
        default:
975
          break;
976
        }
977
      break;
978
 
979
    case BSWAP:
980
      /* (bswap (bswap x)) -> x.  */
981
      if (GET_CODE (op) == BSWAP)
982
        return XEXP (op, 0);
983
      break;
984
 
985
    case FLOAT:
986
      /* (float (sign_extend <X>)) = (float <X>).  */
987
      if (GET_CODE (op) == SIGN_EXTEND)
988
        return simplify_gen_unary (FLOAT, mode, XEXP (op, 0),
989
                                   GET_MODE (XEXP (op, 0)));
990
      break;
991
 
992
    case SIGN_EXTEND:
993
      /* (sign_extend (truncate (minus (label_ref L1) (label_ref L2))))
994
         becomes just the MINUS if its mode is MODE.  This allows
995
         folding switch statements on machines using casesi (such as
996
         the VAX).  */
997
      if (GET_CODE (op) == TRUNCATE
998
          && GET_MODE (XEXP (op, 0)) == mode
999
          && GET_CODE (XEXP (op, 0)) == MINUS
1000
          && GET_CODE (XEXP (XEXP (op, 0), 0)) == LABEL_REF
1001
          && GET_CODE (XEXP (XEXP (op, 0), 1)) == LABEL_REF)
1002
        return XEXP (op, 0);
1003
 
1004
      /* Check for a sign extension of a subreg of a promoted
1005
         variable, where the promotion is sign-extended, and the
1006
         target mode is the same as the variable's promotion.  */
1007
      if (GET_CODE (op) == SUBREG
1008
          && SUBREG_PROMOTED_VAR_P (op)
1009
          && ! SUBREG_PROMOTED_UNSIGNED_P (op)
1010
          && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (XEXP (op, 0))))
1011
        return rtl_hooks.gen_lowpart_no_emit (mode, op);
1012
 
1013
#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
1014
      /* As we do not know which address space the pointer is refering to,
1015
         we can do this only if the target does not support different pointer
1016
         or address modes depending on the address space.  */
1017
      if (target_default_pointer_address_modes_p ()
1018
          && ! POINTERS_EXTEND_UNSIGNED
1019
          && mode == Pmode && GET_MODE (op) == ptr_mode
1020
          && (CONSTANT_P (op)
1021
              || (GET_CODE (op) == SUBREG
1022
                  && REG_P (SUBREG_REG (op))
1023
                  && REG_POINTER (SUBREG_REG (op))
1024
                  && GET_MODE (SUBREG_REG (op)) == Pmode)))
1025
        return convert_memory_address (Pmode, op);
1026
#endif
1027
      break;
1028
 
1029
    case ZERO_EXTEND:
1030
      /* Check for a zero extension of a subreg of a promoted
1031
         variable, where the promotion is zero-extended, and the
1032
         target mode is the same as the variable's promotion.  */
1033
      if (GET_CODE (op) == SUBREG
1034
          && SUBREG_PROMOTED_VAR_P (op)
1035
          && SUBREG_PROMOTED_UNSIGNED_P (op) > 0
1036
          && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (XEXP (op, 0))))
1037
        return rtl_hooks.gen_lowpart_no_emit (mode, op);
1038
 
1039
#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
1040
      /* As we do not know which address space the pointer is refering to,
1041
         we can do this only if the target does not support different pointer
1042
         or address modes depending on the address space.  */
1043
      if (target_default_pointer_address_modes_p ()
1044
          && POINTERS_EXTEND_UNSIGNED > 0
1045
          && mode == Pmode && GET_MODE (op) == ptr_mode
1046
          && (CONSTANT_P (op)
1047
              || (GET_CODE (op) == SUBREG
1048
                  && REG_P (SUBREG_REG (op))
1049
                  && REG_POINTER (SUBREG_REG (op))
1050
                  && GET_MODE (SUBREG_REG (op)) == Pmode)))
1051
        return convert_memory_address (Pmode, op);
1052
#endif
1053
      break;
1054
 
1055
    default:
1056
      break;
1057
    }
1058
 
1059
  return 0;
1060
}
1061
 
1062
/* Try to compute the value of a unary operation CODE whose output mode is to
1063
   be MODE with input operand OP whose mode was originally OP_MODE.
1064
   Return zero if the value cannot be computed.  */
1065
rtx
1066
simplify_const_unary_operation (enum rtx_code code, enum machine_mode mode,
1067
                                rtx op, enum machine_mode op_mode)
1068
{
1069
  unsigned int width = GET_MODE_BITSIZE (mode);
1070
 
1071
  if (code == VEC_DUPLICATE)
1072
    {
1073
      gcc_assert (VECTOR_MODE_P (mode));
1074
      if (GET_MODE (op) != VOIDmode)
1075
      {
1076
        if (!VECTOR_MODE_P (GET_MODE (op)))
1077
          gcc_assert (GET_MODE_INNER (mode) == GET_MODE (op));
1078
        else
1079
          gcc_assert (GET_MODE_INNER (mode) == GET_MODE_INNER
1080
                                                (GET_MODE (op)));
1081
      }
1082
      if (CONST_INT_P (op) || GET_CODE (op) == CONST_DOUBLE
1083
          || GET_CODE (op) == CONST_VECTOR)
1084
        {
1085
          int elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode));
1086
          unsigned n_elts = (GET_MODE_SIZE (mode) / elt_size);
1087
          rtvec v = rtvec_alloc (n_elts);
1088
          unsigned int i;
1089
 
1090
          if (GET_CODE (op) != CONST_VECTOR)
1091
            for (i = 0; i < n_elts; i++)
1092
              RTVEC_ELT (v, i) = op;
1093
          else
1094
            {
1095
              enum machine_mode inmode = GET_MODE (op);
1096
              int in_elt_size = GET_MODE_SIZE (GET_MODE_INNER (inmode));
1097
              unsigned in_n_elts = (GET_MODE_SIZE (inmode) / in_elt_size);
1098
 
1099
              gcc_assert (in_n_elts < n_elts);
1100
              gcc_assert ((n_elts % in_n_elts) == 0);
1101
              for (i = 0; i < n_elts; i++)
1102
                RTVEC_ELT (v, i) = CONST_VECTOR_ELT (op, i % in_n_elts);
1103
            }
1104
          return gen_rtx_CONST_VECTOR (mode, v);
1105
        }
1106
    }
1107
 
1108
  if (VECTOR_MODE_P (mode) && GET_CODE (op) == CONST_VECTOR)
1109
    {
1110
      int elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode));
1111
      unsigned n_elts = (GET_MODE_SIZE (mode) / elt_size);
1112
      enum machine_mode opmode = GET_MODE (op);
1113
      int op_elt_size = GET_MODE_SIZE (GET_MODE_INNER (opmode));
1114
      unsigned op_n_elts = (GET_MODE_SIZE (opmode) / op_elt_size);
1115
      rtvec v = rtvec_alloc (n_elts);
1116
      unsigned int i;
1117
 
1118
      gcc_assert (op_n_elts == n_elts);
1119
      for (i = 0; i < n_elts; i++)
1120
        {
1121
          rtx x = simplify_unary_operation (code, GET_MODE_INNER (mode),
1122
                                            CONST_VECTOR_ELT (op, i),
1123
                                            GET_MODE_INNER (opmode));
1124
          if (!x)
1125
            return 0;
1126
          RTVEC_ELT (v, i) = x;
1127
        }
1128
      return gen_rtx_CONST_VECTOR (mode, v);
1129
    }
1130
 
1131
  /* The order of these tests is critical so that, for example, we don't
1132
     check the wrong mode (input vs. output) for a conversion operation,
1133
     such as FIX.  At some point, this should be simplified.  */
1134
 
1135
  if (code == FLOAT && GET_MODE (op) == VOIDmode
1136
      && (GET_CODE (op) == CONST_DOUBLE || CONST_INT_P (op)))
1137
    {
1138
      HOST_WIDE_INT hv, lv;
1139
      REAL_VALUE_TYPE d;
1140
 
1141
      if (CONST_INT_P (op))
1142
        lv = INTVAL (op), hv = HWI_SIGN_EXTEND (lv);
1143
      else
1144
        lv = CONST_DOUBLE_LOW (op),  hv = CONST_DOUBLE_HIGH (op);
1145
 
1146
      REAL_VALUE_FROM_INT (d, lv, hv, mode);
1147
      d = real_value_truncate (mode, d);
1148
      return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
1149
    }
1150
  else if (code == UNSIGNED_FLOAT && GET_MODE (op) == VOIDmode
1151
           && (GET_CODE (op) == CONST_DOUBLE
1152
               || CONST_INT_P (op)))
1153
    {
1154
      HOST_WIDE_INT hv, lv;
1155
      REAL_VALUE_TYPE d;
1156
 
1157
      if (CONST_INT_P (op))
1158
        lv = INTVAL (op), hv = HWI_SIGN_EXTEND (lv);
1159
      else
1160
        lv = CONST_DOUBLE_LOW (op),  hv = CONST_DOUBLE_HIGH (op);
1161
 
1162
      if (op_mode == VOIDmode)
1163
        {
1164
          /* We don't know how to interpret negative-looking numbers in
1165
             this case, so don't try to fold those.  */
1166
          if (hv < 0)
1167
            return 0;
1168
        }
1169
      else if (GET_MODE_BITSIZE (op_mode) >= HOST_BITS_PER_WIDE_INT * 2)
1170
        ;
1171
      else
1172
        hv = 0, lv &= GET_MODE_MASK (op_mode);
1173
 
1174
      REAL_VALUE_FROM_UNSIGNED_INT (d, lv, hv, mode);
1175
      d = real_value_truncate (mode, d);
1176
      return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
1177
    }
1178
 
1179
  if (CONST_INT_P (op)
1180
      && width <= HOST_BITS_PER_WIDE_INT && width > 0)
1181
    {
1182
      HOST_WIDE_INT arg0 = INTVAL (op);
1183
      HOST_WIDE_INT val;
1184
 
1185
      switch (code)
1186
        {
1187
        case NOT:
1188
          val = ~ arg0;
1189
          break;
1190
 
1191
        case NEG:
1192
          val = - arg0;
1193
          break;
1194
 
1195
        case ABS:
1196
          val = (arg0 >= 0 ? arg0 : - arg0);
1197
          break;
1198
 
1199
        case FFS:
1200
          /* Don't use ffs here.  Instead, get low order bit and then its
1201
             number.  If arg0 is zero, this will return 0, as desired.  */
1202
          arg0 &= GET_MODE_MASK (mode);
1203
          val = exact_log2 (arg0 & (- arg0)) + 1;
1204
          break;
1205
 
1206
        case CLZ:
1207
          arg0 &= GET_MODE_MASK (mode);
1208
          if (arg0 == 0 && CLZ_DEFINED_VALUE_AT_ZERO (mode, val))
1209
            ;
1210
          else
1211
            val = GET_MODE_BITSIZE (mode) - floor_log2 (arg0) - 1;
1212
          break;
1213
 
1214
        case CTZ:
1215
          arg0 &= GET_MODE_MASK (mode);
1216
          if (arg0 == 0)
1217
            {
1218
              /* Even if the value at zero is undefined, we have to come
1219
                 up with some replacement.  Seems good enough.  */
1220
              if (! CTZ_DEFINED_VALUE_AT_ZERO (mode, val))
1221
                val = GET_MODE_BITSIZE (mode);
1222
            }
1223
          else
1224
            val = exact_log2 (arg0 & -arg0);
1225
          break;
1226
 
1227
        case POPCOUNT:
1228
          arg0 &= GET_MODE_MASK (mode);
1229
          val = 0;
1230
          while (arg0)
1231
            val++, arg0 &= arg0 - 1;
1232
          break;
1233
 
1234
        case PARITY:
1235
          arg0 &= GET_MODE_MASK (mode);
1236
          val = 0;
1237
          while (arg0)
1238
            val++, arg0 &= arg0 - 1;
1239
          val &= 1;
1240
          break;
1241
 
1242
        case BSWAP:
1243
          {
1244
            unsigned int s;
1245
 
1246
            val = 0;
1247
            for (s = 0; s < width; s += 8)
1248
              {
1249
                unsigned int d = width - s - 8;
1250
                unsigned HOST_WIDE_INT byte;
1251
                byte = (arg0 >> s) & 0xff;
1252
                val |= byte << d;
1253
              }
1254
          }
1255
          break;
1256
 
1257
        case TRUNCATE:
1258
          val = arg0;
1259
          break;
1260
 
1261
        case ZERO_EXTEND:
1262
          /* When zero-extending a CONST_INT, we need to know its
1263
             original mode.  */
1264
          gcc_assert (op_mode != VOIDmode);
1265
          if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_WIDE_INT)
1266
            {
1267
              /* If we were really extending the mode,
1268
                 we would have to distinguish between zero-extension
1269
                 and sign-extension.  */
1270
              gcc_assert (width == GET_MODE_BITSIZE (op_mode));
1271
              val = arg0;
1272
            }
1273
          else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT)
1274
            val = arg0 & ~((HOST_WIDE_INT) (-1) << GET_MODE_BITSIZE (op_mode));
1275
          else
1276
            return 0;
1277
          break;
1278
 
1279
        case SIGN_EXTEND:
1280
          if (op_mode == VOIDmode)
1281
            op_mode = mode;
1282
          if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_WIDE_INT)
1283
            {
1284
              /* If we were really extending the mode,
1285
                 we would have to distinguish between zero-extension
1286
                 and sign-extension.  */
1287
              gcc_assert (width == GET_MODE_BITSIZE (op_mode));
1288
              val = arg0;
1289
            }
1290
          else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT)
1291
            {
1292
              val
1293
                = arg0 & ~((HOST_WIDE_INT) (-1) << GET_MODE_BITSIZE (op_mode));
1294
              if (val
1295
                  & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (op_mode) - 1)))
1296
                val -= (HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode);
1297
            }
1298
          else
1299
            return 0;
1300
          break;
1301
 
1302
        case SQRT:
1303
        case FLOAT_EXTEND:
1304
        case FLOAT_TRUNCATE:
1305
        case SS_TRUNCATE:
1306
        case US_TRUNCATE:
1307
        case SS_NEG:
1308
        case US_NEG:
1309
        case SS_ABS:
1310
          return 0;
1311
 
1312
        default:
1313
          gcc_unreachable ();
1314
        }
1315
 
1316
      return gen_int_mode (val, mode);
1317
    }
1318
 
1319
  /* We can do some operations on integer CONST_DOUBLEs.  Also allow
1320
     for a DImode operation on a CONST_INT.  */
1321
  else if (GET_MODE (op) == VOIDmode
1322
           && width <= HOST_BITS_PER_WIDE_INT * 2
1323
           && (GET_CODE (op) == CONST_DOUBLE
1324
               || CONST_INT_P (op)))
1325
    {
1326
      unsigned HOST_WIDE_INT l1, lv;
1327
      HOST_WIDE_INT h1, hv;
1328
 
1329
      if (GET_CODE (op) == CONST_DOUBLE)
1330
        l1 = CONST_DOUBLE_LOW (op), h1 = CONST_DOUBLE_HIGH (op);
1331
      else
1332
        l1 = INTVAL (op), h1 = HWI_SIGN_EXTEND (l1);
1333
 
1334
      switch (code)
1335
        {
1336
        case NOT:
1337
          lv = ~ l1;
1338
          hv = ~ h1;
1339
          break;
1340
 
1341
        case NEG:
1342
          neg_double (l1, h1, &lv, &hv);
1343
          break;
1344
 
1345
        case ABS:
1346
          if (h1 < 0)
1347
            neg_double (l1, h1, &lv, &hv);
1348
          else
1349
            lv = l1, hv = h1;
1350
          break;
1351
 
1352
        case FFS:
1353
          hv = 0;
1354
          if (l1 == 0)
1355
            {
1356
              if (h1 == 0)
1357
                lv = 0;
1358
              else
1359
                lv = HOST_BITS_PER_WIDE_INT + exact_log2 (h1 & -h1) + 1;
1360
            }
1361
          else
1362
            lv = exact_log2 (l1 & -l1) + 1;
1363
          break;
1364
 
1365
        case CLZ:
1366
          hv = 0;
1367
          if (h1 != 0)
1368
            lv = GET_MODE_BITSIZE (mode) - floor_log2 (h1) - 1
1369
              - HOST_BITS_PER_WIDE_INT;
1370
          else if (l1 != 0)
1371
            lv = GET_MODE_BITSIZE (mode) - floor_log2 (l1) - 1;
1372
          else if (! CLZ_DEFINED_VALUE_AT_ZERO (mode, lv))
1373
            lv = GET_MODE_BITSIZE (mode);
1374
          break;
1375
 
1376
        case CTZ:
1377
          hv = 0;
1378
          if (l1 != 0)
1379
            lv = exact_log2 (l1 & -l1);
1380
          else if (h1 != 0)
1381
            lv = HOST_BITS_PER_WIDE_INT + exact_log2 (h1 & -h1);
1382
          else if (! CTZ_DEFINED_VALUE_AT_ZERO (mode, lv))
1383
            lv = GET_MODE_BITSIZE (mode);
1384
          break;
1385
 
1386
        case POPCOUNT:
1387
          hv = 0;
1388
          lv = 0;
1389
          while (l1)
1390
            lv++, l1 &= l1 - 1;
1391
          while (h1)
1392
            lv++, h1 &= h1 - 1;
1393
          break;
1394
 
1395
        case PARITY:
1396
          hv = 0;
1397
          lv = 0;
1398
          while (l1)
1399
            lv++, l1 &= l1 - 1;
1400
          while (h1)
1401
            lv++, h1 &= h1 - 1;
1402
          lv &= 1;
1403
          break;
1404
 
1405
        case BSWAP:
1406
          {
1407
            unsigned int s;
1408
 
1409
            hv = 0;
1410
            lv = 0;
1411
            for (s = 0; s < width; s += 8)
1412
              {
1413
                unsigned int d = width - s - 8;
1414
                unsigned HOST_WIDE_INT byte;
1415
 
1416
                if (s < HOST_BITS_PER_WIDE_INT)
1417
                  byte = (l1 >> s) & 0xff;
1418
                else
1419
                  byte = (h1 >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
1420
 
1421
                if (d < HOST_BITS_PER_WIDE_INT)
1422
                  lv |= byte << d;
1423
                else
1424
                  hv |= byte << (d - HOST_BITS_PER_WIDE_INT);
1425
              }
1426
          }
1427
          break;
1428
 
1429
        case TRUNCATE:
1430
          /* This is just a change-of-mode, so do nothing.  */
1431
          lv = l1, hv = h1;
1432
          break;
1433
 
1434
        case ZERO_EXTEND:
1435
          gcc_assert (op_mode != VOIDmode);
1436
 
1437
          if (GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT)
1438
            return 0;
1439
 
1440
          hv = 0;
1441
          lv = l1 & GET_MODE_MASK (op_mode);
1442
          break;
1443
 
1444
        case SIGN_EXTEND:
1445
          if (op_mode == VOIDmode
1446
              || GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT)
1447
            return 0;
1448
          else
1449
            {
1450
              lv = l1 & GET_MODE_MASK (op_mode);
              if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
                  && (lv & ((HOST_WIDE_INT) 1
                            << (GET_MODE_BITSIZE (op_mode) - 1))) != 0)
                lv -= (HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode);

              hv = HWI_SIGN_EXTEND (lv);
            }
          break;

        case SQRT:
          return 0;

        default:
          return 0;
        }

      return immed_double_const (lv, hv, mode);
    }

  else if (GET_CODE (op) == CONST_DOUBLE
           && SCALAR_FLOAT_MODE_P (mode))
    {
      REAL_VALUE_TYPE d, t;
      REAL_VALUE_FROM_CONST_DOUBLE (d, op);

      switch (code)
        {
        case SQRT:
          if (HONOR_SNANS (mode) && real_isnan (&d))
            return 0;
          real_sqrt (&t, mode, &d);
          d = t;
          break;
        case ABS:
          d = REAL_VALUE_ABS (d);
          break;
        case NEG:
          d = REAL_VALUE_NEGATE (d);
          break;
        case FLOAT_TRUNCATE:
          d = real_value_truncate (mode, d);
          break;
        case FLOAT_EXTEND:
          /* All this does is change the mode.  */
          break;
        case FIX:
          real_arithmetic (&d, FIX_TRUNC_EXPR, &d, NULL);
          break;
        case NOT:
          {
            long tmp[4];
            int i;

            real_to_target (tmp, &d, GET_MODE (op));
            for (i = 0; i < 4; i++)
              tmp[i] = ~tmp[i];
            real_from_target (&d, tmp, mode);
            break;
          }
        default:
          gcc_unreachable ();
        }
      return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
    }

  else if (GET_CODE (op) == CONST_DOUBLE
           && SCALAR_FLOAT_MODE_P (GET_MODE (op))
           && GET_MODE_CLASS (mode) == MODE_INT
           && width <= 2*HOST_BITS_PER_WIDE_INT && width > 0)
    {
      /* Although the overflow semantics of RTL's FIX and UNSIGNED_FIX
         operators are intentionally left unspecified (to ease implementation
         by target backends), for consistency, this routine implements the
         same semantics for constant folding as used by the middle-end.  */

      /* This was formerly used only for non-IEEE float.
         eggert@twinsun.com says it is safe for IEEE also.  */
      HOST_WIDE_INT xh, xl, th, tl;
      REAL_VALUE_TYPE x, t;
      REAL_VALUE_FROM_CONST_DOUBLE (x, op);
      switch (code)
        {
        case FIX:
          if (REAL_VALUE_ISNAN (x))
            return const0_rtx;
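          /* Out-of-range values saturate: e.g. folding a FIX of 3e9 to
             a 32-bit mode yields the maximum 0x7fffffff, and values
             below the minimum fold to the minimum.  */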

          /* Test against the signed upper bound.  */
          if (width > HOST_BITS_PER_WIDE_INT)
            {
              th = ((unsigned HOST_WIDE_INT) 1
                    << (width - HOST_BITS_PER_WIDE_INT - 1)) - 1;
              tl = -1;
            }
          else
            {
              th = 0;
              tl = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
            }
          real_from_integer (&t, VOIDmode, tl, th, 0);
          if (REAL_VALUES_LESS (t, x))
            {
              xh = th;
              xl = tl;
              break;
            }

          /* Test against the signed lower bound.  */
          if (width > HOST_BITS_PER_WIDE_INT)
            {
              th = (HOST_WIDE_INT) -1 << (width - HOST_BITS_PER_WIDE_INT - 1);
              tl = 0;
            }
          else
            {
              th = -1;
              tl = (HOST_WIDE_INT) -1 << (width - 1);
            }
          real_from_integer (&t, VOIDmode, tl, th, 0);
          if (REAL_VALUES_LESS (x, t))
            {
              xh = th;
              xl = tl;
              break;
            }
          REAL_VALUE_TO_INT (&xl, &xh, x);
          break;

        case UNSIGNED_FIX:
          if (REAL_VALUE_ISNAN (x) || REAL_VALUE_NEGATIVE (x))
            return const0_rtx;

          /* Test against the unsigned upper bound.  */
          if (width == 2*HOST_BITS_PER_WIDE_INT)
            {
              th = -1;
              tl = -1;
            }
          else if (width >= HOST_BITS_PER_WIDE_INT)
            {
              th = ((unsigned HOST_WIDE_INT) 1
                    << (width - HOST_BITS_PER_WIDE_INT)) - 1;
              tl = -1;
            }
          else
            {
              th = 0;
              tl = ((unsigned HOST_WIDE_INT) 1 << width) - 1;
            }
          real_from_integer (&t, VOIDmode, tl, th, 1);
          if (REAL_VALUES_LESS (t, x))
            {
              xh = th;
              xl = tl;
              break;
            }

          REAL_VALUE_TO_INT (&xl, &xh, x);
          break;

        default:
          gcc_unreachable ();
        }
      return immed_double_const (xl, xh, mode);
    }

  return NULL_RTX;
}

/* Subroutine of simplify_binary_operation to simplify a commutative,
   associative binary operation CODE with result mode MODE, operating
   on OP0 and OP1.  CODE is currently one of PLUS, MULT, AND, IOR, XOR,
   SMIN, SMAX, UMIN or UMAX.  Return zero if no simplification or
   canonicalization is possible.  */
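/* For example, left-linearizing and re-folding can let the constants in
   (plus (plus a 1) (plus b 2)) combine into (plus (plus a b) 3).  */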

static rtx
simplify_associative_operation (enum rtx_code code, enum machine_mode mode,
                                rtx op0, rtx op1)
{
  rtx tem;

  /* Linearize the operator to the left.  */
  if (GET_CODE (op1) == code)
    {
      /* "(a op b) op (c op d)" becomes "(((a op b) op c) op d)".  */
      if (GET_CODE (op0) == code)
        {
          tem = simplify_gen_binary (code, mode, op0, XEXP (op1, 0));
          return simplify_gen_binary (code, mode, tem, XEXP (op1, 1));
        }

      /* "a op (b op c)" becomes "(b op c) op a".  */
      if (! swap_commutative_operands_p (op1, op0))
        return simplify_gen_binary (code, mode, op1, op0);

      tem = op0;
      op0 = op1;
      op1 = tem;
    }

  if (GET_CODE (op0) == code)
    {
      /* Canonicalize "(x op c) op y" as "(x op y) op c".  */
      if (swap_commutative_operands_p (XEXP (op0, 1), op1))
        {
          tem = simplify_gen_binary (code, mode, XEXP (op0, 0), op1);
          return simplify_gen_binary (code, mode, tem, XEXP (op0, 1));
        }

      /* Attempt to simplify "(a op b) op c" as "a op (b op c)".  */
      tem = simplify_binary_operation (code, mode, XEXP (op0, 1), op1);
      if (tem != 0)
        return simplify_gen_binary (code, mode, XEXP (op0, 0), tem);

      /* Attempt to simplify "(a op b) op c" as "(a op c) op b".  */
      tem = simplify_binary_operation (code, mode, XEXP (op0, 0), op1);
      if (tem != 0)
        return simplify_gen_binary (code, mode, tem, XEXP (op0, 1));
    }

  return 0;
}


/* Simplify a binary operation CODE with result mode MODE, operating on OP0
   and OP1.  Return 0 if no simplification is possible.

   Don't use this for relational operations such as EQ or LT.
   Use simplify_relational_operation instead.  */
rtx
simplify_binary_operation (enum rtx_code code, enum machine_mode mode,
                           rtx op0, rtx op1)
{
  rtx trueop0, trueop1;
  rtx tem;

  /* Relational operations don't work here.  We must know the mode
     of the operands in order to do the comparison correctly.
     Assuming a full word can give incorrect results.
     Consider comparing 128 with -128 in QImode.  */
  gcc_assert (GET_RTX_CLASS (code) != RTX_COMPARE);
  gcc_assert (GET_RTX_CLASS (code) != RTX_COMM_COMPARE);

  /* Make sure the constant is second.  */
  if (GET_RTX_CLASS (code) == RTX_COMM_ARITH
      && swap_commutative_operands_p (op0, op1))
    {
      tem = op0, op0 = op1, op1 = tem;
    }

  trueop0 = avoid_constant_pool_reference (op0);
  trueop1 = avoid_constant_pool_reference (op1);

  tem = simplify_const_binary_operation (code, mode, trueop0, trueop1);
  if (tem)
    return tem;
  return simplify_binary_operation_1 (code, mode, op0, op1, trueop0, trueop1);
}

/* Subroutine of simplify_binary_operation.  Simplify a binary operation
   CODE with result mode MODE, operating on OP0 and OP1.  If OP0 and/or
   OP1 are constant pool references, TRUEOP0 and TRUEOP1 represent the
   actual constants.  */

static rtx
simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode,
                             rtx op0, rtx op1, rtx trueop0, rtx trueop1)
{
  rtx tem, reversed, opleft, opright;
  HOST_WIDE_INT val;
  unsigned int width = GET_MODE_BITSIZE (mode);

  /* Even if we can't compute a constant result,
     there are some cases worth simplifying.  */

  switch (code)
    {
    case PLUS:
      /* Maybe simplify x + 0 to x.  The two expressions are equivalent
         when x is NaN, infinite, or finite and nonzero.  They aren't
         when x is -0 and the rounding mode is not towards -infinity,
         since (-0) + 0 is then 0.  */
      if (!HONOR_SIGNED_ZEROS (mode) && trueop1 == CONST0_RTX (mode))
        return op0;

      /* ((-a) + b) -> (b - a) and similarly for (a + (-b)).  These
         transformations are safe even for IEEE.  */
      if (GET_CODE (op0) == NEG)
        return simplify_gen_binary (MINUS, mode, op1, XEXP (op0, 0));
      else if (GET_CODE (op1) == NEG)
        return simplify_gen_binary (MINUS, mode, op0, XEXP (op1, 0));

      /* (~a) + 1 -> -a */
      if (INTEGRAL_MODE_P (mode)
          && GET_CODE (op0) == NOT
          && trueop1 == const1_rtx)
        return simplify_gen_unary (NEG, mode, XEXP (op0, 0), mode);

      /* Handle both-operands-constant cases.  We can only add
         CONST_INTs to constants since the sum of relocatable symbols
         can't be handled by most assemblers.  Don't add CONST_INT
         to CONST_INT since overflow won't be computed properly if wider
         than HOST_BITS_PER_WIDE_INT.  */

      if ((GET_CODE (op0) == CONST
           || GET_CODE (op0) == SYMBOL_REF
           || GET_CODE (op0) == LABEL_REF)
          && CONST_INT_P (op1))
        return plus_constant (op0, INTVAL (op1));
      else if ((GET_CODE (op1) == CONST
                || GET_CODE (op1) == SYMBOL_REF
                || GET_CODE (op1) == LABEL_REF)
               && CONST_INT_P (op0))
        return plus_constant (op1, INTVAL (op0));

      /* See if this is something like X * C - X or vice versa or
         if the multiplication is written as a shift.  If so, we can
         distribute and make a new multiply, shift, or maybe just
         have X (if C is 2 in the example above).  But don't make
         something more expensive than we had before.  */
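      /* E.g. (plus (mult x 3) x) can become (mult x 4), and
         (plus (ashift x 2) x) can become (mult x 5), provided the
         multiply costs no more than the original expression.  */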

      if (SCALAR_INT_MODE_P (mode))
        {
          HOST_WIDE_INT coeff0h = 0, coeff1h = 0;
          unsigned HOST_WIDE_INT coeff0l = 1, coeff1l = 1;
          rtx lhs = op0, rhs = op1;

          if (GET_CODE (lhs) == NEG)
            {
              coeff0l = -1;
              coeff0h = -1;
              lhs = XEXP (lhs, 0);
            }
          else if (GET_CODE (lhs) == MULT
                   && CONST_INT_P (XEXP (lhs, 1)))
            {
              coeff0l = INTVAL (XEXP (lhs, 1));
              coeff0h = INTVAL (XEXP (lhs, 1)) < 0 ? -1 : 0;
              lhs = XEXP (lhs, 0);
            }
          else if (GET_CODE (lhs) == ASHIFT
                   && CONST_INT_P (XEXP (lhs, 1))
                   && INTVAL (XEXP (lhs, 1)) >= 0
                   && INTVAL (XEXP (lhs, 1)) < HOST_BITS_PER_WIDE_INT)
            {
              coeff0l = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (lhs, 1));
              coeff0h = 0;
              lhs = XEXP (lhs, 0);
            }

          if (GET_CODE (rhs) == NEG)
            {
              coeff1l = -1;
              coeff1h = -1;
              rhs = XEXP (rhs, 0);
            }
          else if (GET_CODE (rhs) == MULT
                   && CONST_INT_P (XEXP (rhs, 1)))
            {
              coeff1l = INTVAL (XEXP (rhs, 1));
              coeff1h = INTVAL (XEXP (rhs, 1)) < 0 ? -1 : 0;
              rhs = XEXP (rhs, 0);
            }
          else if (GET_CODE (rhs) == ASHIFT
                   && CONST_INT_P (XEXP (rhs, 1))
                   && INTVAL (XEXP (rhs, 1)) >= 0
                   && INTVAL (XEXP (rhs, 1)) < HOST_BITS_PER_WIDE_INT)
            {
              coeff1l = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (rhs, 1));
              coeff1h = 0;
              rhs = XEXP (rhs, 0);
            }

          if (rtx_equal_p (lhs, rhs))
            {
              rtx orig = gen_rtx_PLUS (mode, op0, op1);
              rtx coeff;
              unsigned HOST_WIDE_INT l;
              HOST_WIDE_INT h;
              bool speed = optimize_function_for_speed_p (cfun);

              add_double (coeff0l, coeff0h, coeff1l, coeff1h, &l, &h);
              coeff = immed_double_const (l, h, mode);

              tem = simplify_gen_binary (MULT, mode, lhs, coeff);
              return rtx_cost (tem, SET, speed) <= rtx_cost (orig, SET, speed)
                ? tem : 0;
            }
        }

      /* (plus (xor X C1) C2) is (xor X (C1^C2)) if C2 is signbit.  */
      if ((CONST_INT_P (op1)
           || GET_CODE (op1) == CONST_DOUBLE)
          && GET_CODE (op0) == XOR
          && (CONST_INT_P (XEXP (op0, 1))
              || GET_CODE (XEXP (op0, 1)) == CONST_DOUBLE)
          && mode_signbit_p (mode, op1))
        return simplify_gen_binary (XOR, mode, XEXP (op0, 0),
                                    simplify_gen_binary (XOR, mode, op1,
                                                         XEXP (op0, 1)));

      /* Canonicalize (plus (mult (neg B) C) A) to (minus A (mult B C)).  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (mode)
          && GET_CODE (op0) == MULT
          && GET_CODE (XEXP (op0, 0)) == NEG)
        {
          rtx in1, in2;

          in1 = XEXP (XEXP (op0, 0), 0);
          in2 = XEXP (op0, 1);
          return simplify_gen_binary (MINUS, mode, op1,
                                      simplify_gen_binary (MULT, mode,
                                                           in1, in2));
        }

      /* (plus (comparison A B) C) can become (neg (rev-comp A B)) if
         C is 1 and STORE_FLAG_VALUE is -1 or if C is -1 and STORE_FLAG_VALUE
         is 1.  */
      if (COMPARISON_P (op0)
          && ((STORE_FLAG_VALUE == -1 && trueop1 == const1_rtx)
              || (STORE_FLAG_VALUE == 1 && trueop1 == constm1_rtx))
          && (reversed = reversed_comparison (op0, mode)))
        return
          simplify_gen_unary (NEG, mode, reversed, mode);

      /* If one of the operands is a PLUS or a MINUS, see if we can
         simplify this by the associative law.
         Don't use the associative law for floating point.
         The inaccuracy makes it nonassociative,
         and subtle programs can break if operations are associated.  */

      if (INTEGRAL_MODE_P (mode)
          && (plus_minus_operand_p (op0)
              || plus_minus_operand_p (op1))
          && (tem = simplify_plus_minus (code, mode, op0, op1)) != 0)
        return tem;

      /* Reassociate floating point addition only when the user
         specifies associative math operations.  */
      if (FLOAT_MODE_P (mode)
          && flag_associative_math)
        {
          tem = simplify_associative_operation (code, mode, op0, op1);
          if (tem)
            return tem;
        }
      break;

    case COMPARE:
      /* Convert (compare (gt (flags) 0) (lt (flags) 0)) to (flags).  */
      if (((GET_CODE (op0) == GT && GET_CODE (op1) == LT)
           || (GET_CODE (op0) == GTU && GET_CODE (op1) == LTU))
          && XEXP (op0, 1) == const0_rtx && XEXP (op1, 1) == const0_rtx)
        {
          rtx xop00 = XEXP (op0, 0);
          rtx xop10 = XEXP (op1, 0);

#ifdef HAVE_cc0
          if (GET_CODE (xop00) == CC0 && GET_CODE (xop10) == CC0)
#else
            if (REG_P (xop00) && REG_P (xop10)
                && GET_MODE (xop00) == GET_MODE (xop10)
                && REGNO (xop00) == REGNO (xop10)
                && GET_MODE_CLASS (GET_MODE (xop00)) == MODE_CC
                && GET_MODE_CLASS (GET_MODE (xop10)) == MODE_CC)
#endif
              return xop00;
        }
      break;

    case MINUS:
      /* We can't assume x-x is 0 even with non-IEEE floating point,
         but since it is zero except in very strange circumstances, we
         will treat it as zero with -ffinite-math-only.  */
      if (rtx_equal_p (trueop0, trueop1)
          && ! side_effects_p (op0)
          && (!FLOAT_MODE_P (mode) || !HONOR_NANS (mode)))
        return CONST0_RTX (mode);

      /* Change subtraction from zero into negation.  (0 - x) is the
         same as -x when x is NaN, infinite, or finite and nonzero.
         But if the mode has signed zeros, and does not round towards
         -infinity, then 0 - 0 is 0, not -0.  */
      if (!HONOR_SIGNED_ZEROS (mode) && trueop0 == CONST0_RTX (mode))
        return simplify_gen_unary (NEG, mode, op1, mode);

      /* (-1 - a) is ~a.  */
      if (trueop0 == constm1_rtx)
        return simplify_gen_unary (NOT, mode, op1, mode);

      /* Subtracting 0 has no effect unless the mode has signed zeros
         and supports rounding towards -infinity.  In such a case,
         0 - 0 is -0.  */
      if (!(HONOR_SIGNED_ZEROS (mode)
            && HONOR_SIGN_DEPENDENT_ROUNDING (mode))
          && trueop1 == CONST0_RTX (mode))
        return op0;

      /* See if this is something like X * C - X or vice versa or
         if the multiplication is written as a shift.  If so, we can
         distribute and make a new multiply, shift, or maybe just
         have X (if C is 2 in the example above).  But don't make
         something more expensive than we had before.  */

      if (SCALAR_INT_MODE_P (mode))
        {
          HOST_WIDE_INT coeff0h = 0, negcoeff1h = -1;
          unsigned HOST_WIDE_INT coeff0l = 1, negcoeff1l = -1;
          rtx lhs = op0, rhs = op1;

          if (GET_CODE (lhs) == NEG)
            {
              coeff0l = -1;
              coeff0h = -1;
              lhs = XEXP (lhs, 0);
            }
          else if (GET_CODE (lhs) == MULT
                   && CONST_INT_P (XEXP (lhs, 1)))
            {
              coeff0l = INTVAL (XEXP (lhs, 1));
              coeff0h = INTVAL (XEXP (lhs, 1)) < 0 ? -1 : 0;
              lhs = XEXP (lhs, 0);
            }
          else if (GET_CODE (lhs) == ASHIFT
                   && CONST_INT_P (XEXP (lhs, 1))
                   && INTVAL (XEXP (lhs, 1)) >= 0
                   && INTVAL (XEXP (lhs, 1)) < HOST_BITS_PER_WIDE_INT)
            {
              coeff0l = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (lhs, 1));
              coeff0h = 0;
              lhs = XEXP (lhs, 0);
            }

          if (GET_CODE (rhs) == NEG)
            {
              negcoeff1l = 1;
              negcoeff1h = 0;
              rhs = XEXP (rhs, 0);
            }
          else if (GET_CODE (rhs) == MULT
                   && CONST_INT_P (XEXP (rhs, 1)))
            {
              negcoeff1l = -INTVAL (XEXP (rhs, 1));
              negcoeff1h = INTVAL (XEXP (rhs, 1)) <= 0 ? 0 : -1;
              rhs = XEXP (rhs, 0);
            }
          else if (GET_CODE (rhs) == ASHIFT
                   && CONST_INT_P (XEXP (rhs, 1))
                   && INTVAL (XEXP (rhs, 1)) >= 0
                   && INTVAL (XEXP (rhs, 1)) < HOST_BITS_PER_WIDE_INT)
            {
              negcoeff1l = -(((HOST_WIDE_INT) 1) << INTVAL (XEXP (rhs, 1)));
              negcoeff1h = -1;
              rhs = XEXP (rhs, 0);
            }

          if (rtx_equal_p (lhs, rhs))
            {
              rtx orig = gen_rtx_MINUS (mode, op0, op1);
              rtx coeff;
              unsigned HOST_WIDE_INT l;
              HOST_WIDE_INT h;
              bool speed = optimize_function_for_speed_p (cfun);

              add_double (coeff0l, coeff0h, negcoeff1l, negcoeff1h, &l, &h);
              coeff = immed_double_const (l, h, mode);

              tem = simplify_gen_binary (MULT, mode, lhs, coeff);
              return rtx_cost (tem, SET, speed) <= rtx_cost (orig, SET, speed)
                ? tem : 0;
            }
        }

      /* (a - (-b)) -> (a + b).  True even for IEEE.  */
      if (GET_CODE (op1) == NEG)
        return simplify_gen_binary (PLUS, mode, op0, XEXP (op1, 0));

      /* (-x - c) may be simplified as (-c - x).  */
      if (GET_CODE (op0) == NEG
          && (CONST_INT_P (op1)
              || GET_CODE (op1) == CONST_DOUBLE))
        {
          tem = simplify_unary_operation (NEG, mode, op1, mode);
          if (tem)
            return simplify_gen_binary (MINUS, mode, tem, XEXP (op0, 0));
        }

      /* Don't let a relocatable value get a negative coeff.  */
      if (CONST_INT_P (op1) && GET_MODE (op0) != VOIDmode)
        return simplify_gen_binary (PLUS, mode,
                                    op0,
                                    neg_const_int (mode, op1));

      /* (x - (x & y)) -> (x & ~y) */
      if (GET_CODE (op1) == AND)
        {
          if (rtx_equal_p (op0, XEXP (op1, 0)))
            {
              tem = simplify_gen_unary (NOT, mode, XEXP (op1, 1),
                                        GET_MODE (XEXP (op1, 1)));
              return simplify_gen_binary (AND, mode, op0, tem);
            }
          if (rtx_equal_p (op0, XEXP (op1, 1)))
            {
              tem = simplify_gen_unary (NOT, mode, XEXP (op1, 0),
                                        GET_MODE (XEXP (op1, 0)));
              return simplify_gen_binary (AND, mode, op0, tem);
            }
        }

      /* If STORE_FLAG_VALUE is 1, (minus 1 (comparison foo bar)) can be done
         by reversing the comparison code if valid.  */
      if (STORE_FLAG_VALUE == 1
          && trueop0 == const1_rtx
          && COMPARISON_P (op1)
          && (reversed = reversed_comparison (op1, mode)))
        return reversed;

      /* Canonicalize (minus A (mult (neg B) C)) to (plus (mult B C) A).  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (mode)
          && GET_CODE (op1) == MULT
          && GET_CODE (XEXP (op1, 0)) == NEG)
        {
          rtx in1, in2;

          in1 = XEXP (XEXP (op1, 0), 0);
          in2 = XEXP (op1, 1);
          return simplify_gen_binary (PLUS, mode,
                                      simplify_gen_binary (MULT, mode,
                                                           in1, in2),
                                      op0);
        }

      /* Canonicalize (minus (neg A) (mult B C)) to
         (minus (mult (neg B) C) A).  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (mode)
          && GET_CODE (op1) == MULT
          && GET_CODE (op0) == NEG)
        {
          rtx in1, in2;

          in1 = simplify_gen_unary (NEG, mode, XEXP (op1, 0), mode);
          in2 = XEXP (op1, 1);
          return simplify_gen_binary (MINUS, mode,
                                      simplify_gen_binary (MULT, mode,
                                                           in1, in2),
                                      XEXP (op0, 0));
        }

      /* If one of the operands is a PLUS or a MINUS, see if we can
         simplify this by the associative law.  This will, for example,
         canonicalize (minus A (plus B C)) to (minus (minus A B) C).
         Don't use the associative law for floating point.
         The inaccuracy makes it nonassociative,
         and subtle programs can break if operations are associated.  */

      if (INTEGRAL_MODE_P (mode)
          && (plus_minus_operand_p (op0)
              || plus_minus_operand_p (op1))
          && (tem = simplify_plus_minus (code, mode, op0, op1)) != 0)
        return tem;
      break;

    case MULT:
      if (trueop1 == constm1_rtx)
        return simplify_gen_unary (NEG, mode, op0, mode);

      /* Maybe simplify x * 0 to 0.  The reduction is not valid if
         x is NaN, since x * 0 is then also NaN.  Nor is it valid
         when the mode has signed zeros, since multiplying a negative
         number by 0 will give -0, not 0.  */
      if (!HONOR_NANS (mode)
          && !HONOR_SIGNED_ZEROS (mode)
          && trueop1 == CONST0_RTX (mode)
          && ! side_effects_p (op0))
        return op1;

      /* In IEEE floating point, x*1 is not equivalent to x for
         signalling NaNs.  */
      if (!HONOR_SNANS (mode)
          && trueop1 == CONST1_RTX (mode))
        return op0;

      /* Convert multiply by constant power of two into shift unless
         we are still generating RTL.  This test is a kludge.  */
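      /* E.g. (mult x 8) becomes (ashift x 3).  */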
      if (CONST_INT_P (trueop1)
          && (val = exact_log2 (INTVAL (trueop1))) >= 0
          /* If the mode is larger than the host word size, and the
             uppermost bit is set, then this isn't a power of two due
             to implicit sign extension.  */
          && (width <= HOST_BITS_PER_WIDE_INT
              || val != HOST_BITS_PER_WIDE_INT - 1))
        return simplify_gen_binary (ASHIFT, mode, op0, GEN_INT (val));

      /* Likewise for multipliers wider than a word.  */
      if (GET_CODE (trueop1) == CONST_DOUBLE
          && (GET_MODE (trueop1) == VOIDmode
              || GET_MODE_CLASS (GET_MODE (trueop1)) == MODE_INT)
          && GET_MODE (op0) == mode
          && CONST_DOUBLE_LOW (trueop1) == 0
          && (val = exact_log2 (CONST_DOUBLE_HIGH (trueop1))) >= 0)
        return simplify_gen_binary (ASHIFT, mode, op0,
                                    GEN_INT (val + HOST_BITS_PER_WIDE_INT));

      /* x*2 is x+x and x*(-1) is -x */
      if (GET_CODE (trueop1) == CONST_DOUBLE
          && SCALAR_FLOAT_MODE_P (GET_MODE (trueop1))
          && !DECIMAL_FLOAT_MODE_P (GET_MODE (trueop1))
          && GET_MODE (op0) == mode)
        {
          REAL_VALUE_TYPE d;
          REAL_VALUE_FROM_CONST_DOUBLE (d, trueop1);

          if (REAL_VALUES_EQUAL (d, dconst2))
            return simplify_gen_binary (PLUS, mode, op0, copy_rtx (op0));

          if (!HONOR_SNANS (mode)
              && REAL_VALUES_EQUAL (d, dconstm1))
            return simplify_gen_unary (NEG, mode, op0, mode);
        }

      /* Optimize -x * -x as x * x.  */
      if (FLOAT_MODE_P (mode)
          && GET_CODE (op0) == NEG
          && GET_CODE (op1) == NEG
          && rtx_equal_p (XEXP (op0, 0), XEXP (op1, 0))
          && !side_effects_p (XEXP (op0, 0)))
        return simplify_gen_binary (MULT, mode, XEXP (op0, 0), XEXP (op1, 0));

      /* Likewise, optimize abs(x) * abs(x) as x * x.  */
      if (SCALAR_FLOAT_MODE_P (mode)
          && GET_CODE (op0) == ABS
          && GET_CODE (op1) == ABS
          && rtx_equal_p (XEXP (op0, 0), XEXP (op1, 0))
          && !side_effects_p (XEXP (op0, 0)))
        return simplify_gen_binary (MULT, mode, XEXP (op0, 0), XEXP (op1, 0));

      /* Reassociate multiplication, but for floating point MULTs
         only when the user specifies unsafe math optimizations.  */
      if (! FLOAT_MODE_P (mode)
          || flag_unsafe_math_optimizations)
        {
          tem = simplify_associative_operation (code, mode, op0, op1);
          if (tem)
            return tem;
        }
      break;

    case IOR:
      if (trueop1 == const0_rtx)
        return op0;
      if (CONST_INT_P (trueop1)
          && ((INTVAL (trueop1) & GET_MODE_MASK (mode))
              == GET_MODE_MASK (mode)))
        return op1;
      if (rtx_equal_p (trueop0, trueop1) && ! side_effects_p (op0))
        return op0;
      /* A | (~A) -> -1 */
      if (((GET_CODE (op0) == NOT && rtx_equal_p (XEXP (op0, 0), op1))
           || (GET_CODE (op1) == NOT && rtx_equal_p (XEXP (op1, 0), op0)))
          && ! side_effects_p (op0)
          && SCALAR_INT_MODE_P (mode))
        return constm1_rtx;

      /* (ior A C) is C if all bits of A that might be nonzero are on in C.  */
      if (CONST_INT_P (op1)
          && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
          && (nonzero_bits (op0, mode) & ~INTVAL (op1)) == 0)
        return op1;

      /* Canonicalize (X & C1) | C2.  */
      if (GET_CODE (op0) == AND
          && CONST_INT_P (trueop1)
          && CONST_INT_P (XEXP (op0, 1)))
        {
          HOST_WIDE_INT mask = GET_MODE_MASK (mode);
          HOST_WIDE_INT c1 = INTVAL (XEXP (op0, 1));
          HOST_WIDE_INT c2 = INTVAL (trueop1);

          /* If (C1&C2) == C1, then (X&C1)|C2 becomes C2.  */
          if ((c1 & c2) == c1
              && !side_effects_p (XEXP (op0, 0)))
            return trueop1;

          /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2.  */
          if (((c1|c2) & mask) == mask)
            return simplify_gen_binary (IOR, mode, XEXP (op0, 0), op1);

          /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2.  */
          if (((c1 & ~c2) & mask) != (c1 & mask))
            {
              tem = simplify_gen_binary (AND, mode, XEXP (op0, 0),
                                         gen_int_mode (c1 & ~c2, mode));
              return simplify_gen_binary (IOR, mode, tem, op1);
            }
        }

      /* Convert (A & B) | A to A.  */
      if (GET_CODE (op0) == AND
          && (rtx_equal_p (XEXP (op0, 0), op1)
              || rtx_equal_p (XEXP (op0, 1), op1))
          && ! side_effects_p (XEXP (op0, 0))
          && ! side_effects_p (XEXP (op0, 1)))
        return op1;

      /* Convert (ior (ashift A CX) (lshiftrt A CY)) where CX+CY equals the
         mode size to (rotate A CX).  */
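      /* E.g. in SImode, (ior (ashift x 3) (lshiftrt x 29)) matches,
         since 3 + 29 == 32, and becomes (rotate x 3).  */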

      if (GET_CODE (op1) == ASHIFT
          || GET_CODE (op1) == SUBREG)
        {
          opleft = op1;
          opright = op0;
        }
      else
        {
          opright = op1;
          opleft = op0;
        }

      if (GET_CODE (opleft) == ASHIFT && GET_CODE (opright) == LSHIFTRT
          && rtx_equal_p (XEXP (opleft, 0), XEXP (opright, 0))
          && CONST_INT_P (XEXP (opleft, 1))
          && CONST_INT_P (XEXP (opright, 1))
          && (INTVAL (XEXP (opleft, 1)) + INTVAL (XEXP (opright, 1))
              == GET_MODE_BITSIZE (mode)))
        return gen_rtx_ROTATE (mode, XEXP (opright, 0), XEXP (opleft, 1));

      /* Same, but for ashift that has been "simplified" to a wider mode
        by simplify_shift_const.  */

      if (GET_CODE (opleft) == SUBREG
          && GET_CODE (SUBREG_REG (opleft)) == ASHIFT
          && GET_CODE (opright) == LSHIFTRT
          && GET_CODE (XEXP (opright, 0)) == SUBREG
          && GET_MODE (opleft) == GET_MODE (XEXP (opright, 0))
          && SUBREG_BYTE (opleft) == SUBREG_BYTE (XEXP (opright, 0))
          && (GET_MODE_SIZE (GET_MODE (opleft))
              < GET_MODE_SIZE (GET_MODE (SUBREG_REG (opleft))))
          && rtx_equal_p (XEXP (SUBREG_REG (opleft), 0),
                          SUBREG_REG (XEXP (opright, 0)))
          && CONST_INT_P (XEXP (SUBREG_REG (opleft), 1))
          && CONST_INT_P (XEXP (opright, 1))
          && (INTVAL (XEXP (SUBREG_REG (opleft), 1)) + INTVAL (XEXP (opright, 1))
              == GET_MODE_BITSIZE (mode)))
        return gen_rtx_ROTATE (mode, XEXP (opright, 0),
                               XEXP (SUBREG_REG (opleft), 1));

      /* If we have (ior (and X C1) C2), simplify this by making
         C1 as small as possible if C1 actually changes.  */
      if (CONST_INT_P (op1)
          && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
              || INTVAL (op1) > 0)
          && GET_CODE (op0) == AND
          && CONST_INT_P (XEXP (op0, 1))
          && CONST_INT_P (op1)
          && (INTVAL (XEXP (op0, 1)) & INTVAL (op1)) != 0)
        return simplify_gen_binary (IOR, mode,
                                    simplify_gen_binary
                                          (AND, mode, XEXP (op0, 0),
                                           GEN_INT (INTVAL (XEXP (op0, 1))
                                                    & ~INTVAL (op1))),
                                    op1);

      /* If OP0 is (ashiftrt (plus ...) C), it might actually be
         a (sign_extend (plus ...)).  Then check if OP1 is a CONST_INT and
         the PLUS does not affect any of the bits in OP1: then we can do
         the IOR as a PLUS and we can associate.  This is valid if OP1
         can be safely shifted left C bits.  */
      if (CONST_INT_P (trueop1) && GET_CODE (op0) == ASHIFTRT
          && GET_CODE (XEXP (op0, 0)) == PLUS
          && CONST_INT_P (XEXP (XEXP (op0, 0), 1))
          && CONST_INT_P (XEXP (op0, 1))
          && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT)
        {
          int count = INTVAL (XEXP (op0, 1));
          HOST_WIDE_INT mask = INTVAL (trueop1) << count;

          if (mask >> count == INTVAL (trueop1)
              && (mask & nonzero_bits (XEXP (op0, 0), mode)) == 0)
            return simplify_gen_binary (ASHIFTRT, mode,
                                        plus_constant (XEXP (op0, 0), mask),
                                        XEXP (op0, 1));
        }

      tem = simplify_associative_operation (code, mode, op0, op1);
      if (tem)
        return tem;
      break;

    case XOR:
      if (trueop1 == const0_rtx)
        return op0;
      if (CONST_INT_P (trueop1)
          && ((INTVAL (trueop1) & GET_MODE_MASK (mode))
              == GET_MODE_MASK (mode)))
        return simplify_gen_unary (NOT, mode, op0, mode);
      if (rtx_equal_p (trueop0, trueop1)
          && ! side_effects_p (op0)
          && GET_MODE_CLASS (mode) != MODE_CC)
        return CONST0_RTX (mode);

      /* Canonicalize XOR of the most significant bit to PLUS.  */
      if ((CONST_INT_P (op1)
           || GET_CODE (op1) == CONST_DOUBLE)
          && mode_signbit_p (mode, op1))
        return simplify_gen_binary (PLUS, mode, op0, op1);
      /* (xor (plus X C1) C2) is (xor X (C1^C2)) if C1 is signbit.  */
      if ((CONST_INT_P (op1)
           || GET_CODE (op1) == CONST_DOUBLE)
          && GET_CODE (op0) == PLUS
          && (CONST_INT_P (XEXP (op0, 1))
              || GET_CODE (XEXP (op0, 1)) == CONST_DOUBLE)
          && mode_signbit_p (mode, XEXP (op0, 1)))
        return simplify_gen_binary (XOR, mode, XEXP (op0, 0),
                                    simplify_gen_binary (XOR, mode, op1,
                                                         XEXP (op0, 1)));

      /* If we are XORing two things that have no bits in common,
         convert them into an IOR.  This helps to detect rotation encoded
         using those methods and possibly other simplifications.  */
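      /* E.g. (xor (and x 0xf0) (and y 0x0f)) has disjoint nonzero
         bits, so it can be rewritten as the IOR of the same
         operands.  */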

      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
          && (nonzero_bits (op0, mode)
              & nonzero_bits (op1, mode)) == 0)
        return (simplify_gen_binary (IOR, mode, op0, op1));

      /* Convert (XOR (NOT x) (NOT y)) to (XOR x y).
         Also convert (XOR (NOT x) y) to (NOT (XOR x y)), similarly for
         (NOT y).  */
      {
        int num_negated = 0;

        if (GET_CODE (op0) == NOT)
          num_negated++, op0 = XEXP (op0, 0);
        if (GET_CODE (op1) == NOT)
          num_negated++, op1 = XEXP (op1, 0);

        if (num_negated == 2)
          return simplify_gen_binary (XOR, mode, op0, op1);
        else if (num_negated == 1)
          return simplify_gen_unary (NOT, mode,
                                     simplify_gen_binary (XOR, mode, op0, op1),
                                     mode);
      }

      /* Convert (xor (and A B) B) to (and (not A) B).  The latter may
         correspond to a machine insn or result in further simplifications
         if B is a constant.  */

      if (GET_CODE (op0) == AND
          && rtx_equal_p (XEXP (op0, 1), op1)
          && ! side_effects_p (op1))
        return simplify_gen_binary (AND, mode,
                                    simplify_gen_unary (NOT, mode,
                                                        XEXP (op0, 0), mode),
                                    op1);

      else if (GET_CODE (op0) == AND
               && rtx_equal_p (XEXP (op0, 0), op1)
               && ! side_effects_p (op1))
        return simplify_gen_binary (AND, mode,
                                    simplify_gen_unary (NOT, mode,
                                                        XEXP (op0, 1), mode),
                                    op1);

      /* (xor (comparison foo bar) (const_int 1)) can become the reversed
         comparison if STORE_FLAG_VALUE is 1.  */
      if (STORE_FLAG_VALUE == 1
          && trueop1 == const1_rtx
          && COMPARISON_P (op0)
          && (reversed = reversed_comparison (op0, mode)))
        return reversed;

      /* (lshiftrt foo C) where C is the number of bits in FOO minus 1
         is (lt foo (const_int 0)), so we can perform the above
         simplification if STORE_FLAG_VALUE is 1.  */

      if (STORE_FLAG_VALUE == 1
          && trueop1 == const1_rtx
          && GET_CODE (op0) == LSHIFTRT
          && CONST_INT_P (XEXP (op0, 1))
          && INTVAL (XEXP (op0, 1)) == GET_MODE_BITSIZE (mode) - 1)
        return gen_rtx_GE (mode, XEXP (op0, 0), const0_rtx);

      /* (xor (comparison foo bar) (const_int sign-bit))
         when STORE_FLAG_VALUE is the sign bit.  */
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
          && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
              == (unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
          && trueop1 == const_true_rtx
          && COMPARISON_P (op0)
          && (reversed = reversed_comparison (op0, mode)))
        return reversed;

      tem = simplify_associative_operation (code, mode, op0, op1);
      if (tem)
        return tem;
      break;

    case AND:
      if (trueop1 == CONST0_RTX (mode) && ! side_effects_p (op0))
        return trueop1;
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
        {
          HOST_WIDE_INT nzop0 = nonzero_bits (trueop0, mode);
          HOST_WIDE_INT nzop1;
          if (CONST_INT_P (trueop1))
            {
              HOST_WIDE_INT val1 = INTVAL (trueop1);
              /* If we are turning off bits already known off in OP0, we need
                 not do an AND.  */
              if ((nzop0 & ~val1) == 0)
                return op0;
            }
          nzop1 = nonzero_bits (trueop1, mode);
          /* If we are clearing all the nonzero bits, the result is zero.  */
          if ((nzop1 & nzop0) == 0
              && !side_effects_p (op0) && !side_effects_p (op1))
            return CONST0_RTX (mode);
        }
      if (rtx_equal_p (trueop0, trueop1) && ! side_effects_p (op0)
          && GET_MODE_CLASS (mode) != MODE_CC)
        return op0;
      /* A & (~A) -> 0 */
      if (((GET_CODE (op0) == NOT && rtx_equal_p (XEXP (op0, 0), op1))
           || (GET_CODE (op1) == NOT && rtx_equal_p (XEXP (op1, 0), op0)))
          && ! side_effects_p (op0)
          && GET_MODE_CLASS (mode) != MODE_CC)
        return CONST0_RTX (mode);

      /* Transform (and (extend X) C) into (zero_extend (and X C)) if
         there are no nonzero bits of C outside of X's mode.  */
      if ((GET_CODE (op0) == SIGN_EXTEND
           || GET_CODE (op0) == ZERO_EXTEND)
          && CONST_INT_P (trueop1)
          && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
          && (~GET_MODE_MASK (GET_MODE (XEXP (op0, 0)))
              & INTVAL (trueop1)) == 0)
        {
          enum machine_mode imode = GET_MODE (XEXP (op0, 0));
          tem = simplify_gen_binary (AND, imode, XEXP (op0, 0),
                                     gen_int_mode (INTVAL (trueop1),
                                                   imode));
          return simplify_gen_unary (ZERO_EXTEND, mode, tem, imode);
        }

      /* Transform (and (truncate X) C) into (truncate (and X C)).  This way
         we might be able to further simplify the AND with X and potentially
         remove the truncation altogether.  */
      if (GET_CODE (op0) == TRUNCATE && CONST_INT_P (trueop1))
        {
          rtx x = XEXP (op0, 0);
          enum machine_mode xmode = GET_MODE (x);
          tem = simplify_gen_binary (AND, xmode, x,
                                     gen_int_mode (INTVAL (trueop1), xmode));
          return simplify_gen_unary (TRUNCATE, mode, tem, xmode);
        }

      /* Canonicalize (A | C1) & C2 as (A & C2) | (C1 & C2).  */
      if (GET_CODE (op0) == IOR
          && CONST_INT_P (trueop1)
          && CONST_INT_P (XEXP (op0, 1)))
        {
          HOST_WIDE_INT tmp = INTVAL (trueop1) & INTVAL (XEXP (op0, 1));
          return simplify_gen_binary (IOR, mode,
                                      simplify_gen_binary (AND, mode,
                                                           XEXP (op0, 0), op1),
                                      gen_int_mode (tmp, mode));
        }

      /* Convert (A ^ B) & A to A & (~B) since the latter is often a single
         insn (and may simplify more).  */
      if (GET_CODE (op0) == XOR
          && rtx_equal_p (XEXP (op0, 0), op1)
          && ! side_effects_p (op1))
        return simplify_gen_binary (AND, mode,
                                    simplify_gen_unary (NOT, mode,
                                                        XEXP (op0, 1), mode),
                                    op1);

      if (GET_CODE (op0) == XOR
          && rtx_equal_p (XEXP (op0, 1), op1)
          && ! side_effects_p (op1))
        return simplify_gen_binary (AND, mode,
                                    simplify_gen_unary (NOT, mode,
                                                        XEXP (op0, 0), mode),
                                    op1);

      /* Similarly for (~(A ^ B)) & A.  */
      if (GET_CODE (op0) == NOT
          && GET_CODE (XEXP (op0, 0)) == XOR
          && rtx_equal_p (XEXP (XEXP (op0, 0), 0), op1)
          && ! side_effects_p (op1))
        return simplify_gen_binary (AND, mode, XEXP (XEXP (op0, 0), 1), op1);

      if (GET_CODE (op0) == NOT
          && GET_CODE (XEXP (op0, 0)) == XOR
          && rtx_equal_p (XEXP (XEXP (op0, 0), 1), op1)
          && ! side_effects_p (op1))
        return simplify_gen_binary (AND, mode, XEXP (XEXP (op0, 0), 0), op1);

      /* Convert (A | B) & A to A.  */
      if (GET_CODE (op0) == IOR
          && (rtx_equal_p (XEXP (op0, 0), op1)
              || rtx_equal_p (XEXP (op0, 1), op1))
          && ! side_effects_p (XEXP (op0, 0))
          && ! side_effects_p (XEXP (op0, 1)))
        return op1;

      /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
         ((A & N) + B) & M -> (A + B) & M
         Similarly if (N & M) == 0,
         ((A | N) + B) & M -> (A + B) & M
         and for - instead of + and/or ^ instead of |.
         Also, if (N & M) == 0, then
         (A +- N) & M -> A & M.  */
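      /* E.g. with M == 0xff: ((A & 0x1ff) + B) & 0xff can fold to
         (A + B) & 0xff, and (A + 0x100) & 0xff can fold to A & 0xff,
         since bits masked out by M cannot influence the low bits of
         the sum.  */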
      if (CONST_INT_P (trueop1)
          && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
          && ~INTVAL (trueop1)
          && (INTVAL (trueop1) & (INTVAL (trueop1) + 1)) == 0
          && (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS))
        {
          rtx pmop[2];
          int which;

          pmop[0] = XEXP (op0, 0);
          pmop[1] = XEXP (op0, 1);

          if (CONST_INT_P (pmop[1])
              && (INTVAL (pmop[1]) & INTVAL (trueop1)) == 0)
            return simplify_gen_binary (AND, mode, pmop[0], op1);

          for (which = 0; which < 2; which++)
            {
              tem = pmop[which];
              switch (GET_CODE (tem))
                {
                case AND:
                  if (CONST_INT_P (XEXP (tem, 1))
                      && (INTVAL (XEXP (tem, 1)) & INTVAL (trueop1))
                      == INTVAL (trueop1))
                    pmop[which] = XEXP (tem, 0);
                  break;
                case IOR:
                case XOR:
                  if (CONST_INT_P (XEXP (tem, 1))
                      && (INTVAL (XEXP (tem, 1)) & INTVAL (trueop1)) == 0)
                    pmop[which] = XEXP (tem, 0);
                  break;
                default:
                  break;
                }
            }

          if (pmop[0] != XEXP (op0, 0) || pmop[1] != XEXP (op0, 1))
            {
              tem = simplify_gen_binary (GET_CODE (op0), mode,
                                         pmop[0], pmop[1]);
              return simplify_gen_binary (code, mode, tem, op1);
            }
        }

      /* (and X (ior (not X) Y)) -> (and X Y) */
      if (GET_CODE (op1) == IOR
          && GET_CODE (XEXP (op1, 0)) == NOT
          && op0 == XEXP (XEXP (op1, 0), 0))
        return simplify_gen_binary (AND, mode, op0, XEXP (op1, 1));

      /* (and (ior (not X) Y) X) -> (and X Y) */
      if (GET_CODE (op0) == IOR
          && GET_CODE (XEXP (op0, 0)) == NOT
          && op1 == XEXP (XEXP (op0, 0), 0))
        return simplify_gen_binary (AND, mode, op1, XEXP (op0, 1));

      tem = simplify_associative_operation (code, mode, op0, op1);
      if (tem)
        return tem;
      break;

    case UDIV:
      /* 0/x is 0 (or x&0 if x has side-effects).  */
      if (trueop0 == CONST0_RTX (mode))
        {
          if (side_effects_p (op1))
            return simplify_gen_binary (AND, mode, op1, trueop0);
          return trueop0;
        }
      /* x/1 is x.  */
      if (trueop1 == CONST1_RTX (mode))
        return rtl_hooks.gen_lowpart_no_emit (mode, op0);
      /* Convert divide by power of two into shift.  */
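      /* E.g. (udiv x 16) becomes (lshiftrt x 4).  */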
      if (CONST_INT_P (trueop1)
          && (val = exact_log2 (INTVAL (trueop1))) > 0)
        return simplify_gen_binary (LSHIFTRT, mode, op0, GEN_INT (val));
      break;

    case DIV:
      /* Handle floating point and integers separately.  */
      if (SCALAR_FLOAT_MODE_P (mode))
        {
          /* Maybe change 0.0 / x to 0.0.  This transformation isn't
             safe for modes with NaNs, since 0.0 / 0.0 will then be
             NaN rather than 0.0.  Nor is it safe for modes with signed
             zeros, since dividing 0 by a negative number gives -0.0.  */
          if (trueop0 == CONST0_RTX (mode)
              && !HONOR_NANS (mode)
              && !HONOR_SIGNED_ZEROS (mode)
              && ! side_effects_p (op1))
            return op0;
          /* x/1.0 is x.  */
          if (trueop1 == CONST1_RTX (mode)
              && !HONOR_SNANS (mode))
            return op0;

          if (GET_CODE (trueop1) == CONST_DOUBLE
              && trueop1 != CONST0_RTX (mode))
            {
              REAL_VALUE_TYPE d;
              REAL_VALUE_FROM_CONST_DOUBLE (d, trueop1);

              /* x/-1.0 is -x.  */
              if (REAL_VALUES_EQUAL (d, dconstm1)
                  && !HONOR_SNANS (mode))
                return simplify_gen_unary (NEG, mode, op0, mode);

              /* Change FP division by a constant into multiplication.
                 Only do this with -freciprocal-math.  */
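              /* E.g. x / 4.0 becomes x * 0.25.  */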
              if (flag_reciprocal_math
                  && !REAL_VALUES_EQUAL (d, dconst0))
                {
                  REAL_ARITHMETIC (d, RDIV_EXPR, dconst1, d);
                  tem = CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
                  return simplify_gen_binary (MULT, mode, op0, tem);
                }
            }
        }
      else
        {
          /* 0/x is 0 (or x&0 if x has side-effects).  */
          if (trueop0 == CONST0_RTX (mode))
            {
              if (side_effects_p (op1))
                return simplify_gen_binary (AND, mode, op1, trueop0);
              return trueop0;
            }
          /* x/1 is x.  */
          if (trueop1 == CONST1_RTX (mode))
            return rtl_hooks.gen_lowpart_no_emit (mode, op0);
          /* x/-1 is -x.  */
          if (trueop1 == constm1_rtx)
            {
              rtx x = rtl_hooks.gen_lowpart_no_emit (mode, op0);
              return simplify_gen_unary (NEG, mode, x, mode);
            }
        }
      break;

    case UMOD:
      /* 0%x is 0 (or x&0 if x has side-effects).  */
      if (trueop0 == CONST0_RTX (mode))
        {
          if (side_effects_p (op1))
            return simplify_gen_binary (AND, mode, op1, trueop0);
          return trueop0;
        }
      /* x%1 is 0 (or x&0 if x has side-effects).  */
      if (trueop1 == CONST1_RTX (mode))
        {
          if (side_effects_p (op0))
            return simplify_gen_binary (AND, mode, op0, CONST0_RTX (mode));
          return CONST0_RTX (mode);
        }
      /* Implement modulus by power of two as AND.  */
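      /* E.g. (umod x 8) becomes (and x 7).  */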
      if (CONST_INT_P (trueop1)
          && exact_log2 (INTVAL (trueop1)) > 0)
        return simplify_gen_binary (AND, mode, op0,
                                    GEN_INT (INTVAL (op1) - 1));
      break;

    case MOD:
      /* 0%x is 0 (or x&0 if x has side-effects).  */
      if (trueop0 == CONST0_RTX (mode))
        {
          if (side_effects_p (op1))
            return simplify_gen_binary (AND, mode, op1, trueop0);
          return trueop0;
        }
      /* x%1 and x%-1 are 0 (or x&0 if x has side-effects).  */
      if (trueop1 == CONST1_RTX (mode) || trueop1 == constm1_rtx)
        {
          if (side_effects_p (op0))
            return simplify_gen_binary (AND, mode, op0, CONST0_RTX (mode));
          return CONST0_RTX (mode);
        }
      break;

    case ROTATERT:
    case ROTATE:
    case ASHIFTRT:
      if (trueop1 == CONST0_RTX (mode))
        return op0;
      if (trueop0 == CONST0_RTX (mode) && ! side_effects_p (op1))
        return op0;
      /* Rotating ~0 always results in ~0.  */
      if (CONST_INT_P (trueop0) && width <= HOST_BITS_PER_WIDE_INT
          && (unsigned HOST_WIDE_INT) INTVAL (trueop0) == GET_MODE_MASK (mode)
          && ! side_effects_p (op1))
        return op0;
    canonicalize_shift:
      if (SHIFT_COUNT_TRUNCATED && CONST_INT_P (op1))
        {
          val = INTVAL (op1) & (GET_MODE_BITSIZE (mode) - 1);
          if (val != INTVAL (op1))
            return simplify_gen_binary (code, mode, op0, GEN_INT (val));
        }
      break;

    case ASHIFT:
    case SS_ASHIFT:
    case US_ASHIFT:
      if (trueop1 == CONST0_RTX (mode))
        return op0;
      if (trueop0 == CONST0_RTX (mode) && ! side_effects_p (op1))
        return op0;
      goto canonicalize_shift;

    case LSHIFTRT:
      if (trueop1 == CONST0_RTX (mode))
        return op0;
      if (trueop0 == CONST0_RTX (mode) && ! side_effects_p (op1))
        return op0;
      /* Optimize (lshiftrt (clz X) C) as (eq X 0).  */
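      /* When CLZ of zero is defined to be the mode bitsize (e.g. 32 for
         SImode), only X == 0 makes (clz X) reach that value, so shifting
         right by log2 of the bitsize leaves 1 exactly when X is zero.  */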
      if (GET_CODE (op0) == CLZ
          && CONST_INT_P (trueop1)
          && STORE_FLAG_VALUE == 1
          && INTVAL (trueop1) < (HOST_WIDE_INT)width)
        {
          enum machine_mode imode = GET_MODE (XEXP (op0, 0));
          unsigned HOST_WIDE_INT zero_val = 0;

          if (CLZ_DEFINED_VALUE_AT_ZERO (imode, zero_val)
              && zero_val == GET_MODE_BITSIZE (imode)
              && INTVAL (trueop1) == exact_log2 (zero_val))
            return simplify_gen_relational (EQ, mode, imode,
                                            XEXP (op0, 0), const0_rtx);
        }
      goto canonicalize_shift;
2798
 
2799
    case SMIN:
2800
      if (width <= HOST_BITS_PER_WIDE_INT
2801
          && CONST_INT_P (trueop1)
2802
          && INTVAL (trueop1) == (HOST_WIDE_INT) 1 << (width -1)
2803
          && ! side_effects_p (op0))
2804
        return op1;
2805
      if (rtx_equal_p (trueop0, trueop1) && ! side_effects_p (op0))
2806
        return op0;
2807
      tem = simplify_associative_operation (code, mode, op0, op1);
2808
      if (tem)
2809
        return tem;
2810
      break;
2811
 
2812
    case SMAX:
2813
      if (width <= HOST_BITS_PER_WIDE_INT
          && CONST_INT_P (trueop1)
          && ((unsigned HOST_WIDE_INT) INTVAL (trueop1)
              == (unsigned HOST_WIDE_INT) GET_MODE_MASK (mode) >> 1)
          && ! side_effects_p (op0))
        return op1;
      if (rtx_equal_p (trueop0, trueop1) && ! side_effects_p (op0))
        return op0;
      tem = simplify_associative_operation (code, mode, op0, op1);
      if (tem)
        return tem;
      break;
 
    case UMIN:
      if (trueop1 == CONST0_RTX (mode) && ! side_effects_p (op0))
        return op1;
      if (rtx_equal_p (trueop0, trueop1) && ! side_effects_p (op0))
        return op0;
      tem = simplify_associative_operation (code, mode, op0, op1);
      if (tem)
        return tem;
      break;

    case UMAX:
      if (trueop1 == constm1_rtx && ! side_effects_p (op0))
        return op1;
      if (rtx_equal_p (trueop0, trueop1) && ! side_effects_p (op0))
        return op0;
      tem = simplify_associative_operation (code, mode, op0, op1);
      if (tem)
        return tem;
      break;

    case SS_PLUS:
    case US_PLUS:
    case SS_MINUS:
    case US_MINUS:
    case SS_MULT:
    case US_MULT:
    case SS_DIV:
    case US_DIV:
      /* ??? There are simplifications that can be done.  */
      return 0;
 
    case VEC_SELECT:
      if (!VECTOR_MODE_P (mode))
        {
          gcc_assert (VECTOR_MODE_P (GET_MODE (trueop0)));
          gcc_assert (mode == GET_MODE_INNER (GET_MODE (trueop0)));
          gcc_assert (GET_CODE (trueop1) == PARALLEL);
          gcc_assert (XVECLEN (trueop1, 0) == 1);
          gcc_assert (CONST_INT_P (XVECEXP (trueop1, 0, 0)));

          if (GET_CODE (trueop0) == CONST_VECTOR)
            return CONST_VECTOR_ELT (trueop0, INTVAL (XVECEXP
                                                      (trueop1, 0, 0)));
 
          /* Extract a scalar element from a nested VEC_SELECT expression
             (with an optional nested VEC_CONCAT expression).  Some targets
             (i386) extract a scalar element from a vector using a chain of
             nested VEC_SELECT expressions.  When the input operand is a
             memory operand, this operation can be simplified to a simple
             scalar load from an offset memory address.  */
          if (GET_CODE (trueop0) == VEC_SELECT)
            {
              rtx op0 = XEXP (trueop0, 0);
              rtx op1 = XEXP (trueop0, 1);

              enum machine_mode opmode = GET_MODE (op0);
              int elt_size = GET_MODE_SIZE (GET_MODE_INNER (opmode));
              int n_elts = GET_MODE_SIZE (opmode) / elt_size;

              int i = INTVAL (XVECEXP (trueop1, 0, 0));
              int elem;

              rtvec vec;
              rtx tmp_op, tmp;

              gcc_assert (GET_CODE (op1) == PARALLEL);
              gcc_assert (i < n_elts);
 
              /* Select the element pointed to by the nested selector.  */
              elem = INTVAL (XVECEXP (op1, 0, i));

              /* Handle the case where the nested VEC_SELECT wraps a
                 VEC_CONCAT.  */
              if (GET_CODE (op0) == VEC_CONCAT)
                {
                  rtx op00 = XEXP (op0, 0);
                  rtx op01 = XEXP (op0, 1);

                  enum machine_mode mode00, mode01;
                  int n_elts00, n_elts01;

                  mode00 = GET_MODE (op00);
                  mode01 = GET_MODE (op01);

                  /* Find out the number of elements of each operand.  */
                  if (VECTOR_MODE_P (mode00))
                    {
                      elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode00));
                      n_elts00 = GET_MODE_SIZE (mode00) / elt_size;
                    }
                  else
                    n_elts00 = 1;

                  if (VECTOR_MODE_P (mode01))
                    {
                      elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode01));
                      n_elts01 = GET_MODE_SIZE (mode01) / elt_size;
                    }
                  else
                    n_elts01 = 1;

                  gcc_assert (n_elts == n_elts00 + n_elts01);
 
                  /* Select the correct operand of the VEC_CONCAT
                     and adjust the selector.  */
                  if (elem < n_elts00)
                    tmp_op = op00;
                  else
                    {
                      tmp_op = op01;
                      elem -= n_elts00;
                    }
                }
              else
                tmp_op = op0;

              vec = rtvec_alloc (1);
              RTVEC_ELT (vec, 0) = GEN_INT (elem);

              tmp = gen_rtx_fmt_ee (code, mode,
                                    tmp_op, gen_rtx_PARALLEL (VOIDmode, vec));
              return tmp;
            }
          if (GET_CODE (trueop0) == VEC_DUPLICATE
              && GET_MODE (XEXP (trueop0, 0)) == mode)
            return XEXP (trueop0, 0);
        }
      else
        {
          gcc_assert (VECTOR_MODE_P (GET_MODE (trueop0)));
          gcc_assert (GET_MODE_INNER (mode)
                      == GET_MODE_INNER (GET_MODE (trueop0)));
          gcc_assert (GET_CODE (trueop1) == PARALLEL);

          if (GET_CODE (trueop0) == CONST_VECTOR)
            {
              int elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode));
              unsigned n_elts = (GET_MODE_SIZE (mode) / elt_size);
              rtvec v = rtvec_alloc (n_elts);
              unsigned int i;

              gcc_assert (XVECLEN (trueop1, 0) == (int) n_elts);
              for (i = 0; i < n_elts; i++)
                {
                  rtx x = XVECEXP (trueop1, 0, i);

                  gcc_assert (CONST_INT_P (x));
                  RTVEC_ELT (v, i) = CONST_VECTOR_ELT (trueop0,
                                                       INTVAL (x));
                }

              return gen_rtx_CONST_VECTOR (mode, v);
            }
        }

      if (XVECLEN (trueop1, 0) == 1
          && CONST_INT_P (XVECEXP (trueop1, 0, 0))
          && GET_CODE (trueop0) == VEC_CONCAT)
        {
          rtx vec = trueop0;
          int offset = INTVAL (XVECEXP (trueop1, 0, 0)) * GET_MODE_SIZE (mode);

          /* Try to find the element in the VEC_CONCAT.  */
          while (GET_MODE (vec) != mode
                 && GET_CODE (vec) == VEC_CONCAT)
            {
              HOST_WIDE_INT vec_size = GET_MODE_SIZE (GET_MODE (XEXP (vec, 0)));
              if (offset < vec_size)
                vec = XEXP (vec, 0);
              else
                {
                  offset -= vec_size;
                  vec = XEXP (vec, 1);
                }
              vec = avoid_constant_pool_reference (vec);
            }

          if (GET_MODE (vec) == mode)
            return vec;
        }

      return 0;
    case VEC_CONCAT:
      {
        enum machine_mode op0_mode = (GET_MODE (trueop0) != VOIDmode
                                      ? GET_MODE (trueop0)
                                      : GET_MODE_INNER (mode));
        enum machine_mode op1_mode = (GET_MODE (trueop1) != VOIDmode
                                      ? GET_MODE (trueop1)
                                      : GET_MODE_INNER (mode));

        gcc_assert (VECTOR_MODE_P (mode));
        gcc_assert (GET_MODE_SIZE (op0_mode) + GET_MODE_SIZE (op1_mode)
                    == GET_MODE_SIZE (mode));

        if (VECTOR_MODE_P (op0_mode))
          gcc_assert (GET_MODE_INNER (mode)
                      == GET_MODE_INNER (op0_mode));
        else
          gcc_assert (GET_MODE_INNER (mode) == op0_mode);

        if (VECTOR_MODE_P (op1_mode))
          gcc_assert (GET_MODE_INNER (mode)
                      == GET_MODE_INNER (op1_mode));
        else
          gcc_assert (GET_MODE_INNER (mode) == op1_mode);

        if ((GET_CODE (trueop0) == CONST_VECTOR
             || CONST_INT_P (trueop0)
             || GET_CODE (trueop0) == CONST_DOUBLE)
            && (GET_CODE (trueop1) == CONST_VECTOR
                || CONST_INT_P (trueop1)
                || GET_CODE (trueop1) == CONST_DOUBLE))
          {
            int elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode));
            unsigned n_elts = (GET_MODE_SIZE (mode) / elt_size);
            rtvec v = rtvec_alloc (n_elts);
            unsigned int i;
            unsigned in_n_elts = 1;

            if (VECTOR_MODE_P (op0_mode))
              in_n_elts = (GET_MODE_SIZE (op0_mode) / elt_size);
            for (i = 0; i < n_elts; i++)
              {
                if (i < in_n_elts)
                  {
                    if (!VECTOR_MODE_P (op0_mode))
                      RTVEC_ELT (v, i) = trueop0;
                    else
                      RTVEC_ELT (v, i) = CONST_VECTOR_ELT (trueop0, i);
                  }
                else
                  {
                    if (!VECTOR_MODE_P (op1_mode))
                      RTVEC_ELT (v, i) = trueop1;
                    else
                      RTVEC_ELT (v, i) = CONST_VECTOR_ELT (trueop1,
                                                           i - in_n_elts);
                  }
              }

            return gen_rtx_CONST_VECTOR (mode, v);
          }
      }
      return 0;
 
    default:
      gcc_unreachable ();
    }

  return 0;
}
 
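/* Try to fold CODE applied to the constant operands OP0 and OP1 in MODE.
   Return the folded constant rtx, or 0 if the expression cannot be
   folded at compile time.  */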
rtx
simplify_const_binary_operation (enum rtx_code code, enum machine_mode mode,
                                 rtx op0, rtx op1)
{
  HOST_WIDE_INT arg0, arg1, arg0s, arg1s;
  HOST_WIDE_INT val;
  unsigned int width = GET_MODE_BITSIZE (mode);
 
  if (VECTOR_MODE_P (mode)
      && code != VEC_CONCAT
      && GET_CODE (op0) == CONST_VECTOR
      && GET_CODE (op1) == CONST_VECTOR)
    {
      unsigned n_elts = GET_MODE_NUNITS (mode);
      enum machine_mode op0mode = GET_MODE (op0);
      unsigned op0_n_elts = GET_MODE_NUNITS (op0mode);
      enum machine_mode op1mode = GET_MODE (op1);
      unsigned op1_n_elts = GET_MODE_NUNITS (op1mode);
      rtvec v = rtvec_alloc (n_elts);
      unsigned int i;

      gcc_assert (op0_n_elts == n_elts);
      gcc_assert (op1_n_elts == n_elts);
      for (i = 0; i < n_elts; i++)
        {
          rtx x = simplify_binary_operation (code, GET_MODE_INNER (mode),
                                             CONST_VECTOR_ELT (op0, i),
                                             CONST_VECTOR_ELT (op1, i));
          if (!x)
            return 0;
          RTVEC_ELT (v, i) = x;
        }

      return gen_rtx_CONST_VECTOR (mode, v);
    }
 
  if (VECTOR_MODE_P (mode)
      && code == VEC_CONCAT
      && (CONST_INT_P (op0)
          || GET_CODE (op0) == CONST_DOUBLE
          || GET_CODE (op0) == CONST_FIXED)
      && (CONST_INT_P (op1)
          || GET_CODE (op1) == CONST_DOUBLE
          || GET_CODE (op1) == CONST_FIXED))
    {
      unsigned n_elts = GET_MODE_NUNITS (mode);
      rtvec v = rtvec_alloc (n_elts);

      gcc_assert (n_elts >= 2);
      if (n_elts == 2)
        {
          gcc_assert (GET_CODE (op0) != CONST_VECTOR);
          gcc_assert (GET_CODE (op1) != CONST_VECTOR);

          RTVEC_ELT (v, 0) = op0;
          RTVEC_ELT (v, 1) = op1;
        }
      else
        {
          unsigned op0_n_elts = GET_MODE_NUNITS (GET_MODE (op0));
          unsigned op1_n_elts = GET_MODE_NUNITS (GET_MODE (op1));
          unsigned i;

          gcc_assert (GET_CODE (op0) == CONST_VECTOR);
          gcc_assert (GET_CODE (op1) == CONST_VECTOR);
          gcc_assert (op0_n_elts + op1_n_elts == n_elts);

          for (i = 0; i < op0_n_elts; ++i)
            RTVEC_ELT (v, i) = XVECEXP (op0, 0, i);
          for (i = 0; i < op1_n_elts; ++i)
            RTVEC_ELT (v, op0_n_elts+i) = XVECEXP (op1, 0, i);
        }

      return gen_rtx_CONST_VECTOR (mode, v);
    }
 
  if (SCALAR_FLOAT_MODE_P (mode)
      && GET_CODE (op0) == CONST_DOUBLE
      && GET_CODE (op1) == CONST_DOUBLE
      && mode == GET_MODE (op0) && mode == GET_MODE (op1))
    {
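      /* Bitwise operations on floating-point constants are performed on
         the target bit patterns obtained via real_to_target.  */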
      if (code == AND
          || code == IOR
          || code == XOR)
        {
          long tmp0[4];
          long tmp1[4];
          REAL_VALUE_TYPE r;
          int i;

          real_to_target (tmp0, CONST_DOUBLE_REAL_VALUE (op0),
                          GET_MODE (op0));
          real_to_target (tmp1, CONST_DOUBLE_REAL_VALUE (op1),
                          GET_MODE (op1));
          for (i = 0; i < 4; i++)
            {
              switch (code)
              {
              case AND:
                tmp0[i] &= tmp1[i];
                break;
              case IOR:
                tmp0[i] |= tmp1[i];
                break;
              case XOR:
                tmp0[i] ^= tmp1[i];
                break;
              default:
                gcc_unreachable ();
              }
            }
          real_from_target (&r, tmp0, mode);
          return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
        }
      else
        {
          REAL_VALUE_TYPE f0, f1, value, result;
          bool inexact;

          REAL_VALUE_FROM_CONST_DOUBLE (f0, op0);
          REAL_VALUE_FROM_CONST_DOUBLE (f1, op1);
          real_convert (&f0, mode, &f0);
          real_convert (&f1, mode, &f1);

          if (HONOR_SNANS (mode)
              && (REAL_VALUE_ISNAN (f0) || REAL_VALUE_ISNAN (f1)))
            return 0;

          if (code == DIV
              && REAL_VALUES_EQUAL (f1, dconst0)
              && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
            return 0;

          if (MODE_HAS_INFINITIES (mode) && HONOR_NANS (mode)
              && flag_trapping_math
              && REAL_VALUE_ISINF (f0) && REAL_VALUE_ISINF (f1))
            {
              int s0 = REAL_VALUE_NEGATIVE (f0);
              int s1 = REAL_VALUE_NEGATIVE (f1);

              switch (code)
                {
                case PLUS:
                  /* Inf + -Inf = NaN plus exception.  */
                  if (s0 != s1)
                    return 0;
                  break;
                case MINUS:
                  /* Inf - Inf = NaN plus exception.  */
                  if (s0 == s1)
                    return 0;
                  break;
                case DIV:
                  /* Inf / Inf = NaN plus exception.  */
                  return 0;
                default:
                  break;
                }
            }

          if (code == MULT && MODE_HAS_INFINITIES (mode) && HONOR_NANS (mode)
              && flag_trapping_math
              && ((REAL_VALUE_ISINF (f0) && REAL_VALUES_EQUAL (f1, dconst0))
                  || (REAL_VALUE_ISINF (f1)
                      && REAL_VALUES_EQUAL (f0, dconst0))))
            /* Inf * 0 = NaN plus exception.  */
            return 0;

          inexact = real_arithmetic (&value, rtx_to_tree_code (code),
                                     &f0, &f1);
          real_convert (&result, mode, &value);
 
          /* Don't constant fold this floating point operation if
             the result has overflowed and flag_trapping_math is set.  */
 
          if (flag_trapping_math
              && MODE_HAS_INFINITIES (mode)
              && REAL_VALUE_ISINF (result)
              && !REAL_VALUE_ISINF (f0)
              && !REAL_VALUE_ISINF (f1))
            /* Overflow plus exception.  */
            return 0;
          /* Don't constant fold this floating point operation if the
             result may depend upon the run-time rounding mode and
             flag_rounding_math is set, or if GCC's software emulation
             is unable to accurately represent the result.  */
 
          if ((flag_rounding_math
               || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
              && (inexact || !real_identical (&result, &value)))
            return NULL_RTX;

          return CONST_DOUBLE_FROM_REAL_VALUE (result, mode);
        }
    }
 
  /* We can fold some multi-word operations.  */
  if (GET_MODE_CLASS (mode) == MODE_INT
      && width == HOST_BITS_PER_WIDE_INT * 2
      && (GET_CODE (op0) == CONST_DOUBLE || CONST_INT_P (op0))
      && (GET_CODE (op1) == CONST_DOUBLE || CONST_INT_P (op1)))
    {
      unsigned HOST_WIDE_INT l1, l2, lv, lt;
      HOST_WIDE_INT h1, h2, hv, ht;

      if (GET_CODE (op0) == CONST_DOUBLE)
        l1 = CONST_DOUBLE_LOW (op0), h1 = CONST_DOUBLE_HIGH (op0);
      else
        l1 = INTVAL (op0), h1 = HWI_SIGN_EXTEND (l1);

      if (GET_CODE (op1) == CONST_DOUBLE)
        l2 = CONST_DOUBLE_LOW (op1), h2 = CONST_DOUBLE_HIGH (op1);
      else
        l2 = INTVAL (op1), h2 = HWI_SIGN_EXTEND (l2);

      switch (code)
        {
        case MINUS:
          /* A - B == A + (-B).  */
          neg_double (l2, h2, &lv, &hv);
          l2 = lv, h2 = hv;

          /* Fall through....  */

        case PLUS:
          add_double (l1, h1, l2, h2, &lv, &hv);
          break;

        case MULT:
          mul_double (l1, h1, l2, h2, &lv, &hv);
          break;

        case DIV:
          if (div_and_round_double (TRUNC_DIV_EXPR, 0, l1, h1, l2, h2,
                                    &lv, &hv, &lt, &ht))
            return 0;
          break;

        case MOD:
          if (div_and_round_double (TRUNC_DIV_EXPR, 0, l1, h1, l2, h2,
                                    &lt, &ht, &lv, &hv))
            return 0;
          break;

        case UDIV:
          if (div_and_round_double (TRUNC_DIV_EXPR, 1, l1, h1, l2, h2,
                                    &lv, &hv, &lt, &ht))
            return 0;
          break;

        case UMOD:
          if (div_and_round_double (TRUNC_DIV_EXPR, 1, l1, h1, l2, h2,
                                    &lt, &ht, &lv, &hv))
            return 0;
          break;

        case AND:
          lv = l1 & l2, hv = h1 & h2;
          break;

        case IOR:
          lv = l1 | l2, hv = h1 | h2;
          break;

        case XOR:
          lv = l1 ^ l2, hv = h1 ^ h2;
          break;

        case SMIN:
          if (h1 < h2
              || (h1 == h2
                  && ((unsigned HOST_WIDE_INT) l1
                      < (unsigned HOST_WIDE_INT) l2)))
            lv = l1, hv = h1;
          else
            lv = l2, hv = h2;
          break;

        case SMAX:
          if (h1 > h2
              || (h1 == h2
                  && ((unsigned HOST_WIDE_INT) l1
                      > (unsigned HOST_WIDE_INT) l2)))
            lv = l1, hv = h1;
          else
            lv = l2, hv = h2;
          break;

        case UMIN:
          if ((unsigned HOST_WIDE_INT) h1 < (unsigned HOST_WIDE_INT) h2
              || (h1 == h2
                  && ((unsigned HOST_WIDE_INT) l1
                      < (unsigned HOST_WIDE_INT) l2)))
            lv = l1, hv = h1;
          else
            lv = l2, hv = h2;
          break;

        case UMAX:
          if ((unsigned HOST_WIDE_INT) h1 > (unsigned HOST_WIDE_INT) h2
              || (h1 == h2
                  && ((unsigned HOST_WIDE_INT) l1
                      > (unsigned HOST_WIDE_INT) l2)))
            lv = l1, hv = h1;
          else
            lv = l2, hv = h2;
          break;

        case LSHIFTRT:   case ASHIFTRT:
        case ASHIFT:
        case ROTATE:     case ROTATERT:
          if (SHIFT_COUNT_TRUNCATED)
            l2 &= (GET_MODE_BITSIZE (mode) - 1), h2 = 0;

          if (h2 != 0 || l2 >= GET_MODE_BITSIZE (mode))
            return 0;

          if (code == LSHIFTRT || code == ASHIFTRT)
            rshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv,
                           code == ASHIFTRT);
          else if (code == ASHIFT)
            lshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv, 1);
          else if (code == ROTATE)
            lrotate_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv);
          else /* code == ROTATERT */
            rrotate_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv);
          break;

        default:
          return 0;
        }

      return immed_double_const (lv, hv, mode);
    }
 
  if (CONST_INT_P (op0) && CONST_INT_P (op1)
      && width <= HOST_BITS_PER_WIDE_INT && width != 0)
    {
      /* Get the integer argument values in two forms:
         zero-extended in ARG0, ARG1 and sign-extended in ARG0S, ARG1S.  */

      arg0 = INTVAL (op0);
      arg1 = INTVAL (op1);

      if (width < HOST_BITS_PER_WIDE_INT)
        {
          arg0 &= ((HOST_WIDE_INT) 1 << width) - 1;
          arg1 &= ((HOST_WIDE_INT) 1 << width) - 1;

          arg0s = arg0;
          if (arg0s & ((HOST_WIDE_INT) 1 << (width - 1)))
            arg0s |= ((HOST_WIDE_INT) (-1) << width);

          arg1s = arg1;
          if (arg1s & ((HOST_WIDE_INT) 1 << (width - 1)))
            arg1s |= ((HOST_WIDE_INT) (-1) << width);
        }
      else
        {
          arg0s = arg0;
          arg1s = arg1;
        }

      /* Compute the value of the arithmetic.  */

      switch (code)
        {
        case PLUS:
          val = arg0s + arg1s;
          break;

        case MINUS:
          val = arg0s - arg1s;
          break;

        case MULT:
          val = arg0s * arg1s;
          break;
 
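        /* The division cases punt on division by zero and on the one
           overflowing case, the most negative value divided by -1.  */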
        case DIV:
          if (arg1s == 0
              || (arg0s == (HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)
                  && arg1s == -1))
            return 0;
          val = arg0s / arg1s;
          break;

        case MOD:
          if (arg1s == 0
              || (arg0s == (HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)
                  && arg1s == -1))
            return 0;
          val = arg0s % arg1s;
          break;

        case UDIV:
          if (arg1 == 0
              || (arg0s == (HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)
                  && arg1s == -1))
            return 0;
          val = (unsigned HOST_WIDE_INT) arg0 / arg1;
          break;

        case UMOD:
          if (arg1 == 0
              || (arg0s == (HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)
                  && arg1s == -1))
            return 0;
          val = (unsigned HOST_WIDE_INT) arg0 % arg1;
          break;

        case AND:
          val = arg0 & arg1;
          break;

        case IOR:
          val = arg0 | arg1;
          break;

        case XOR:
          val = arg0 ^ arg1;
          break;

        case LSHIFTRT:
        case ASHIFT:
        case ASHIFTRT:
          /* Truncate the shift if SHIFT_COUNT_TRUNCATED, otherwise make sure
             the value is in range.  We can't return any old value for
             out-of-range arguments because either the middle-end (via
             shift_truncation_mask) or the back-end might be relying on
             target-specific knowledge.  Nor can we rely on
             shift_truncation_mask, since the shift might not be part of an
             ashlM3, lshrM3 or ashrM3 instruction.  */
          if (SHIFT_COUNT_TRUNCATED)
            arg1 = (unsigned HOST_WIDE_INT) arg1 % width;
          else if (arg1 < 0 || arg1 >= GET_MODE_BITSIZE (mode))
            return 0;

          val = (code == ASHIFT
                 ? ((unsigned HOST_WIDE_INT) arg0) << arg1
                 : ((unsigned HOST_WIDE_INT) arg0) >> arg1);

          /* Sign-extend the result for arithmetic right shifts.  */
          if (code == ASHIFTRT && arg0s < 0 && arg1 > 0)
            val |= ((HOST_WIDE_INT) -1) << (width - arg1);
          break;

        case ROTATERT:
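          /* Reject negative rotate counts; nonnegative counts are
             reduced modulo the width of the mode.  */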
          if (arg1 < 0)
            return 0;

          arg1 %= width;
          val = ((((unsigned HOST_WIDE_INT) arg0) << (width - arg1))
                 | (((unsigned HOST_WIDE_INT) arg0) >> arg1));
          break;

        case ROTATE:
          if (arg1 < 0)
            return 0;

          arg1 %= width;
          val = ((((unsigned HOST_WIDE_INT) arg0) << arg1)
                 | (((unsigned HOST_WIDE_INT) arg0) >> (width - arg1)));
          break;

        case COMPARE:
          /* Do nothing here.  */
          return 0;

        case SMIN:
          val = arg0s <= arg1s ? arg0s : arg1s;
          break;

        case UMIN:
          val = ((unsigned HOST_WIDE_INT) arg0
                 <= (unsigned HOST_WIDE_INT) arg1 ? arg0 : arg1);
          break;

        case SMAX:
          val = arg0s > arg1s ? arg0s : arg1s;
          break;

        case UMAX:
          val = ((unsigned HOST_WIDE_INT) arg0
                 > (unsigned HOST_WIDE_INT) arg1 ? arg0 : arg1);
          break;

        case SS_PLUS:
        case US_PLUS:
        case SS_MINUS:
        case US_MINUS:
        case SS_MULT:
        case US_MULT:
        case SS_DIV:
        case US_DIV:
        case SS_ASHIFT:
        case US_ASHIFT:
          /* ??? There are simplifications that can be done.  */
          return 0;

        default:
          gcc_unreachable ();
        }

      return gen_int_mode (val, mode);
    }

  return NULL_RTX;
}
 
/* Simplify a PLUS or MINUS, at least one of whose operands may be another
   PLUS or MINUS.

   Rather than test for specific cases, we do this by a brute-force method
   and do all possible simplifications until no more changes occur.  Then
   we rebuild the operation.  */

struct simplify_plus_minus_op_data
{
  rtx op;
  short neg;
};
 
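/* Sorting predicate for the operand array: return true if Y should be
   placed before X.  Operands with higher precedence sort first, and
   REGs sort by register number so that equal REGs become adjacent.  */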
static bool
simplify_plus_minus_op_data_cmp (rtx x, rtx y)
{
  int result;

  result = (commutative_operand_precedence (y)
            - commutative_operand_precedence (x));
  if (result)
    return result > 0;

  /* Group together equal REGs to do more simplification.  */
  if (REG_P (x) && REG_P (y))
    return REGNO (x) > REGNO (y);
  else
    return false;
}
 
static rtx
simplify_plus_minus (enum rtx_code code, enum machine_mode mode, rtx op0,
                     rtx op1)
{
  struct simplify_plus_minus_op_data ops[8];
  rtx result, tem;
  int n_ops = 2, input_ops = 2;
  int changed, n_constants = 0, canonicalized = 0;
  int i, j;

  memset (ops, 0, sizeof ops);

  /* Set up the two operands and then expand them until nothing has been
     changed.  If we run out of room in our array, give up; this should
     almost never happen.  */

  ops[0].op = op0;
  ops[0].neg = 0;
  ops[1].op = op1;
  ops[1].neg = (code == MINUS);

  do
    {
      changed = 0;

      for (i = 0; i < n_ops; i++)
        {
          rtx this_op = ops[i].op;
          int this_neg = ops[i].neg;
          enum rtx_code this_code = GET_CODE (this_op);

          switch (this_code)
            {
            case PLUS:
            case MINUS:
              if (n_ops == 7)
                return NULL_RTX;

              ops[n_ops].op = XEXP (this_op, 1);
              ops[n_ops].neg = (this_code == MINUS) ^ this_neg;
              n_ops++;

              ops[i].op = XEXP (this_op, 0);
              input_ops++;
              changed = 1;
              canonicalized |= this_neg;
              break;

            case NEG:
              ops[i].op = XEXP (this_op, 0);
              ops[i].neg = ! this_neg;
              changed = 1;
              canonicalized = 1;
              break;

            case CONST:
              if (n_ops < 7
                  && GET_CODE (XEXP (this_op, 0)) == PLUS
                  && CONSTANT_P (XEXP (XEXP (this_op, 0), 0))
                  && CONSTANT_P (XEXP (XEXP (this_op, 0), 1)))
                {
                  ops[i].op = XEXP (XEXP (this_op, 0), 0);
                  ops[n_ops].op = XEXP (XEXP (this_op, 0), 1);
                  ops[n_ops].neg = this_neg;
                  n_ops++;
                  changed = 1;
                  canonicalized = 1;
                }
              break;

            case NOT:
              /* ~a -> (-a - 1) */
              if (n_ops != 7)
                {
                  ops[n_ops].op = constm1_rtx;
                  ops[n_ops++].neg = this_neg;
                  ops[i].op = XEXP (this_op, 0);
                  ops[i].neg = !this_neg;
                  changed = 1;
                  canonicalized = 1;
                }
              break;

            case CONST_INT:
              n_constants++;
              if (this_neg)
                {
                  ops[i].op = neg_const_int (mode, this_op);
                  ops[i].neg = 0;
                  changed = 1;
                  canonicalized = 1;
                }
              break;

            default:
              break;
            }
        }
    }
  while (changed);

  if (n_constants > 1)
    canonicalized = 1;

  gcc_assert (n_ops >= 2);

  /* If we only have two operands, we can avoid the loops.  */
  if (n_ops == 2)
    {
      enum rtx_code code = ops[0].neg || ops[1].neg ? MINUS : PLUS;
      rtx lhs, rhs;

      /* Get the two operands.  Be careful with the order, especially for
         the cases where code == MINUS.  */
      if (ops[0].neg && ops[1].neg)
        {
          lhs = gen_rtx_NEG (mode, ops[0].op);
          rhs = ops[1].op;
        }
      else if (ops[0].neg)
        {
          lhs = ops[1].op;
          rhs = ops[0].op;
        }
      else
        {
          lhs = ops[0].op;
          rhs = ops[1].op;
        }

      return simplify_const_binary_operation (code, mode, lhs, rhs);
    }
 
  /* Now simplify each pair of operands until nothing changes.  */
  do
    {
      /* Insertion sort is good enough for an eight-element array.  */
      for (i = 1; i < n_ops; i++)
        {
          struct simplify_plus_minus_op_data save;
          j = i - 1;
          if (!simplify_plus_minus_op_data_cmp (ops[j].op, ops[i].op))
            continue;

          canonicalized = 1;
          save = ops[i];
          do
            ops[j + 1] = ops[j];
          while (j-- && simplify_plus_minus_op_data_cmp (ops[j].op, save.op));
          ops[j + 1] = save;
        }

      changed = 0;
      for (i = n_ops - 1; i > 0; i--)
        for (j = i - 1; j >= 0; j--)
          {
            rtx lhs = ops[j].op, rhs = ops[i].op;
            int lneg = ops[j].neg, rneg = ops[i].neg;

            if (lhs != 0 && rhs != 0)
              {
                enum rtx_code ncode = PLUS;

                if (lneg != rneg)
                  {
                    ncode = MINUS;
                    if (lneg)
                      tem = lhs, lhs = rhs, rhs = tem;
                  }
                else if (swap_commutative_operands_p (lhs, rhs))
                  tem = lhs, lhs = rhs, rhs = tem;

                if ((GET_CODE (lhs) == CONST || CONST_INT_P (lhs))
                    && (GET_CODE (rhs) == CONST || CONST_INT_P (rhs)))
                  {
                    rtx tem_lhs, tem_rhs;

                    tem_lhs = GET_CODE (lhs) == CONST ? XEXP (lhs, 0) : lhs;
                    tem_rhs = GET_CODE (rhs) == CONST ? XEXP (rhs, 0) : rhs;
                    tem = simplify_binary_operation (ncode, mode, tem_lhs, tem_rhs);

                    if (tem && !CONSTANT_P (tem))
                      tem = gen_rtx_CONST (GET_MODE (tem), tem);
                  }
                else
                  tem = simplify_binary_operation (ncode, mode, lhs, rhs);

                /* Reject "simplifications" that just wrap the two
                   arguments in a CONST.  Failure to do so can result
                   in infinite recursion with simplify_binary_operation
                   when it calls us to simplify CONST operations.  */
                if (tem
                    && ! (GET_CODE (tem) == CONST
                          && GET_CODE (XEXP (tem, 0)) == ncode
                          && XEXP (XEXP (tem, 0), 0) == lhs
                          && XEXP (XEXP (tem, 0), 1) == rhs))
                  {
                    lneg &= rneg;
                    if (GET_CODE (tem) == NEG)
                      tem = XEXP (tem, 0), lneg = !lneg;
                    if (CONST_INT_P (tem) && lneg)
                      tem = neg_const_int (mode, tem), lneg = 0;

                    ops[i].op = tem;
                    ops[i].neg = lneg;
                    ops[j].op = NULL_RTX;
                    changed = 1;
                    canonicalized = 1;
                  }
              }
          }

      /* If nothing changed, fail.  */
      if (!canonicalized)
        return NULL_RTX;

      /* Pack all the operands to the lower-numbered entries.  */
      for (i = 0, j = 0; j < n_ops; j++)
        if (ops[j].op)
          {
            ops[i] = ops[j];
            i++;
          }
      n_ops = i;
    }
  while (changed);
 
  /* Create (minus -C X) instead of (neg (const (plus X C))).  */
  if (n_ops == 2
      && CONST_INT_P (ops[1].op)
      && CONSTANT_P (ops[0].op)
      && ops[0].neg)
    return gen_rtx_fmt_ee (MINUS, mode, ops[1].op, ops[0].op);

  /* We suppressed creation of trivial CONST expressions in the
     combination loop to avoid recursion.  Create one manually now.
     The combination loop should have ensured that there is exactly
     one CONST_INT, and the sort will have ensured that it is last
     in the array and that any other constant will be next-to-last.  */

  if (n_ops > 1
      && CONST_INT_P (ops[n_ops - 1].op)
      && CONSTANT_P (ops[n_ops - 2].op))
    {
      rtx value = ops[n_ops - 1].op;
      if (ops[n_ops - 1].neg ^ ops[n_ops - 2].neg)
        value = neg_const_int (mode, value);
      ops[n_ops - 2].op = plus_constant (ops[n_ops - 2].op, INTVAL (value));
      n_ops--;
    }

  /* Put a non-negated operand first, if possible.  */

  for (i = 0; i < n_ops && ops[i].neg; i++)
    continue;
  if (i == n_ops)
    ops[0].op = gen_rtx_NEG (mode, ops[0].op);
  else if (i != 0)
    {
      tem = ops[0].op;
      ops[0] = ops[i];
      ops[i].op = tem;
      ops[i].neg = 1;
    }

  /* Now make the result by performing the requested operations.  */
  result = ops[0].op;
  for (i = 1; i < n_ops; i++)
    result = gen_rtx_fmt_ee (ops[i].neg ? MINUS : PLUS,
                             mode, result, ops[i].op);

  return result;
}
 
/* Check whether an operand is suitable for calling simplify_plus_minus.  */
static bool
plus_minus_operand_p (const_rtx x)
{
  return GET_CODE (x) == PLUS
         || GET_CODE (x) == MINUS
         || (GET_CODE (x) == CONST
             && GET_CODE (XEXP (x, 0)) == PLUS
             && CONSTANT_P (XEXP (XEXP (x, 0), 0))
             && CONSTANT_P (XEXP (XEXP (x, 0), 1)));
}
 
/* Like simplify_binary_operation except used for relational operators.
   MODE is the mode of the result.  If MODE is VOIDmode, the operands
   must not both be VOIDmode.

   CMP_MODE specifies the mode in which the comparison is done, so it is
   the mode of the operands.  If CMP_MODE is VOIDmode, it is taken from
   the operands or, if both are VOIDmode, the operands are compared in
   "infinite precision".  */
rtx
simplify_relational_operation (enum rtx_code code, enum machine_mode mode,
                               enum machine_mode cmp_mode, rtx op0, rtx op1)
{
  rtx tem, trueop0, trueop1;

  if (cmp_mode == VOIDmode)
    cmp_mode = GET_MODE (op0);
  if (cmp_mode == VOIDmode)
    cmp_mode = GET_MODE (op1);

  tem = simplify_const_relational_operation (code, cmp_mode, op0, op1);
  if (tem)
    {
      if (SCALAR_FLOAT_MODE_P (mode))
        {
          if (tem == const0_rtx)
            return CONST0_RTX (mode);
#ifdef FLOAT_STORE_FLAG_VALUE
          {
            REAL_VALUE_TYPE val;
            val = FLOAT_STORE_FLAG_VALUE (mode);
            return CONST_DOUBLE_FROM_REAL_VALUE (val, mode);
          }
#else
          return NULL_RTX;
#endif
        }
      if (VECTOR_MODE_P (mode))
        {
          if (tem == const0_rtx)
            return CONST0_RTX (mode);
#ifdef VECTOR_STORE_FLAG_VALUE
          {
            int i, units;
            rtvec v;

            rtx val = VECTOR_STORE_FLAG_VALUE (mode);
            if (val == NULL_RTX)
              return NULL_RTX;
            if (val == const1_rtx)
              return CONST1_RTX (mode);

            units = GET_MODE_NUNITS (mode);
            v = rtvec_alloc (units);
            for (i = 0; i < units; i++)
              RTVEC_ELT (v, i) = val;
            return gen_rtx_raw_CONST_VECTOR (mode, v);
          }
#else
          return NULL_RTX;
#endif
        }

      return tem;
    }

  /* For the following tests, ensure const0_rtx is op1.  */
  if (swap_commutative_operands_p (op0, op1)
      || (op0 == const0_rtx && op1 != const0_rtx))
    tem = op0, op0 = op1, op1 = tem, code = swap_condition (code);

  /* If op0 is a compare, extract the comparison arguments from it.  */
  if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
    return simplify_gen_relational (code, mode, VOIDmode,
                                    XEXP (op0, 0), XEXP (op0, 1));

  if (GET_MODE_CLASS (cmp_mode) == MODE_CC
      || CC0_P (op0))
    return NULL_RTX;

  trueop0 = avoid_constant_pool_reference (op0);
  trueop1 = avoid_constant_pool_reference (op1);
  return simplify_relational_operation_1 (code, mode, cmp_mode,
                                          trueop0, trueop1);
}
 
/* This part of simplify_relational_operation is only used when CMP_MODE
   is not in class MODE_CC (i.e. it is a real comparison).

   MODE is the mode of the result, while CMP_MODE specifies the mode in
   which the comparison is done, so it is the mode of the operands.  */
 
static rtx
simplify_relational_operation_1 (enum rtx_code code, enum machine_mode mode,
                                 enum machine_mode cmp_mode, rtx op0, rtx op1)
{
  enum rtx_code op0code = GET_CODE (op0);

  if (op1 == const0_rtx && COMPARISON_P (op0))
    {
      /* If op0 is a comparison, extract the comparison arguments
         from it.  */
      if (code == NE)
        {
          if (GET_MODE (op0) == mode)
            return simplify_rtx (op0);
          else
            return simplify_gen_relational (GET_CODE (op0), mode, VOIDmode,
                                            XEXP (op0, 0), XEXP (op0, 1));
        }
      else if (code == EQ)
        {
          enum rtx_code new_code = reversed_comparison_code (op0, NULL_RTX);
          if (new_code != UNKNOWN)
            return simplify_gen_relational (new_code, mode, VOIDmode,
                                            XEXP (op0, 0), XEXP (op0, 1));
        }
    }

  /* (LTU/GEU (PLUS a C) C), where C is constant, can be simplified to
     (GEU/LTU a -C).  Likewise for (LTU/GEU (PLUS a C) a).  */
  if ((code == LTU || code == GEU)
      && GET_CODE (op0) == PLUS
      && CONST_INT_P (XEXP (op0, 1))
      && (rtx_equal_p (op1, XEXP (op0, 0))
          || rtx_equal_p (op1, XEXP (op0, 1))))
    {
      rtx new_cmp
        = simplify_gen_unary (NEG, cmp_mode, XEXP (op0, 1), cmp_mode);
      return simplify_gen_relational ((code == LTU ? GEU : LTU), mode,
                                      cmp_mode, XEXP (op0, 0), new_cmp);
    }

  /* Canonicalize (LTU/GEU (PLUS a b) b) as (LTU/GEU (PLUS a b) a).  */
  if ((code == LTU || code == GEU)
      && GET_CODE (op0) == PLUS
      && rtx_equal_p (op1, XEXP (op0, 1))
      /* Don't recurse "infinitely" for (LTU/GEU (PLUS b b) b).  */
      && !rtx_equal_p (op1, XEXP (op0, 0)))
    return simplify_gen_relational (code, mode, cmp_mode, op0,
                                    copy_rtx (XEXP (op0, 0)));

  if (op1 == const0_rtx)
    {
      /* Canonicalize (GTU x 0) as (NE x 0).  */
      if (code == GTU)
        return simplify_gen_relational (NE, mode, cmp_mode, op0, op1);
      /* Canonicalize (LEU x 0) as (EQ x 0).  */
      if (code == LEU)
        return simplify_gen_relational (EQ, mode, cmp_mode, op0, op1);
    }
  else if (op1 == const1_rtx)
    {
      switch (code)
        {
        case GE:
          /* Canonicalize (GE x 1) as (GT x 0).  */
          return simplify_gen_relational (GT, mode, cmp_mode,
                                          op0, const0_rtx);
        case GEU:
          /* Canonicalize (GEU x 1) as (NE x 0).  */
          return simplify_gen_relational (NE, mode, cmp_mode,
                                          op0, const0_rtx);
        case LT:
          /* Canonicalize (LT x 1) as (LE x 0).  */
          return simplify_gen_relational (LE, mode, cmp_mode,
                                          op0, const0_rtx);
        case LTU:
          /* Canonicalize (LTU x 1) as (EQ x 0).  */
          return simplify_gen_relational (EQ, mode, cmp_mode,
                                          op0, const0_rtx);
        default:
          break;
        }
    }
  else if (op1 == constm1_rtx)
    {
      /* Canonicalize (LE x -1) as (LT x 0).  */
      if (code == LE)
        return simplify_gen_relational (LT, mode, cmp_mode, op0, const0_rtx);
      /* Canonicalize (GT x -1) as (GE x 0).  */
      if (code == GT)
        return simplify_gen_relational (GE, mode, cmp_mode, op0, const0_rtx);
    }
 
  /* (eq/ne (plus x cst1) cst2) simplifies to (eq/ne x (cst2 - cst1))  */
  if ((code == EQ || code == NE)
      && (op0code == PLUS || op0code == MINUS)
      && CONSTANT_P (op1)
      && CONSTANT_P (XEXP (op0, 1))
      && (INTEGRAL_MODE_P (cmp_mode) || flag_unsafe_math_optimizations))
    {
      rtx x = XEXP (op0, 0);
      rtx c = XEXP (op0, 1);

      c = simplify_gen_binary (op0code == PLUS ? MINUS : PLUS,
                               cmp_mode, op1, c);
      return simplify_gen_relational (code, mode, cmp_mode, x, c);
    }

  /* (ne:SI (zero_extract:SI FOO (const_int 1) BAR) (const_int 0)) is
     the same as (zero_extract:SI FOO (const_int 1) BAR).  */
  if (code == NE
      && op1 == const0_rtx
      && GET_MODE_CLASS (mode) == MODE_INT
      && cmp_mode != VOIDmode
      /* ??? Work-around BImode bugs in the ia64 backend.  */
      && mode != BImode
      && cmp_mode != BImode
      && nonzero_bits (op0, cmp_mode) == 1
      && STORE_FLAG_VALUE == 1)
    return GET_MODE_SIZE (mode) > GET_MODE_SIZE (cmp_mode)
           ? simplify_gen_unary (ZERO_EXTEND, mode, op0, cmp_mode)
           : lowpart_subreg (mode, op0, cmp_mode);

  /* (eq/ne (xor x y) 0) simplifies to (eq/ne x y).  */
  if ((code == EQ || code == NE)
      && op1 == const0_rtx
      && op0code == XOR)
    return simplify_gen_relational (code, mode, cmp_mode,
                                    XEXP (op0, 0), XEXP (op0, 1));

  /* (eq/ne (xor x y) x) simplifies to (eq/ne y 0).  */
  if ((code == EQ || code == NE)
      && op0code == XOR
      && rtx_equal_p (XEXP (op0, 0), op1)
      && !side_effects_p (XEXP (op0, 0)))
    return simplify_gen_relational (code, mode, cmp_mode,
                                    XEXP (op0, 1), const0_rtx);

  /* Likewise (eq/ne (xor x y) y) simplifies to (eq/ne x 0).  */
  if ((code == EQ || code == NE)
      && op0code == XOR
      && rtx_equal_p (XEXP (op0, 1), op1)
      && !side_effects_p (XEXP (op0, 1)))
    return simplify_gen_relational (code, mode, cmp_mode,
                                    XEXP (op0, 0), const0_rtx);

  /* (eq/ne (xor x C1) C2) simplifies to (eq/ne x (C1^C2)).  */
  if ((code == EQ || code == NE)
      && op0code == XOR
      && (CONST_INT_P (op1)
          || GET_CODE (op1) == CONST_DOUBLE)
      && (CONST_INT_P (XEXP (op0, 1))
          || GET_CODE (XEXP (op0, 1)) == CONST_DOUBLE))
    return simplify_gen_relational (code, mode, cmp_mode, XEXP (op0, 0),
                                    simplify_gen_binary (XOR, cmp_mode,
                                                         XEXP (op0, 1), op1));

  if (op0code == POPCOUNT && op1 == const0_rtx)
    switch (code)
      {
      case EQ:
      case LE:
      case LEU:
        /* (eq (popcount x) (const_int 0)) -> (eq x (const_int 0)).  */
        return simplify_gen_relational (EQ, mode, GET_MODE (XEXP (op0, 0)),
                                        XEXP (op0, 0), const0_rtx);

      case NE:
      case GT:
      case GTU:
        /* (ne (popcount x) (const_int 0)) -> (ne x (const_int 0)).  */
        return simplify_gen_relational (NE, mode, GET_MODE (XEXP (op0, 0)),
                                        XEXP (op0, 0), const0_rtx);

      default:
        break;
      }

  return NULL_RTX;
}
 
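/* Flags recording which basic comparisons (equality, signed and unsigned
   orderings) are known to hold; interpreted by comparison_result.  */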
enum
{
  CMP_EQ = 1,
  CMP_LT = 2,
  CMP_GT = 4,
  CMP_LTU = 8,
  CMP_GTU = 16
};
 
/* Convert the known results for EQ, LT, GT, LTU, GTU contained in
   KNOWN_RESULTS to a CONST_INT, based on the requested comparison CODE.
   For KNOWN_RESULTS to make sense it should be either CMP_EQ, or the
   logical OR of one of (CMP_LT, CMP_GT) and one of (CMP_LTU, CMP_GTU).
   For floating-point comparisons, assume that the operands were
   ordered.  */
 
static rtx
comparison_result (enum rtx_code code, int known_results)
{
  switch (code)
    {
    case EQ:
    case UNEQ:
      return (known_results & CMP_EQ) ? const_true_rtx : const0_rtx;
    case NE:
    case LTGT:
      return (known_results & CMP_EQ) ? const0_rtx : const_true_rtx;

    case LT:
    case UNLT:
      return (known_results & CMP_LT) ? const_true_rtx : const0_rtx;
    case GE:
    case UNGE:
      return (known_results & CMP_LT) ? const0_rtx : const_true_rtx;

    case GT:
    case UNGT:
      return (known_results & CMP_GT) ? const_true_rtx : const0_rtx;
    case LE:
    case UNLE:
      return (known_results & CMP_GT) ? const0_rtx : const_true_rtx;

    case LTU:
      return (known_results & CMP_LTU) ? const_true_rtx : const0_rtx;
    case GEU:
      return (known_results & CMP_LTU) ? const0_rtx : const_true_rtx;

    case GTU:
      return (known_results & CMP_GTU) ? const_true_rtx : const0_rtx;
    case LEU:
      return (known_results & CMP_GTU) ? const0_rtx : const_true_rtx;

    case ORDERED:
      return const_true_rtx;
    case UNORDERED:
      return const0_rtx;
    default:
      gcc_unreachable ();
    }
}
 
/* Check if the given comparison (done in the given MODE) is actually a
   tautology or a contradiction.
   If no simplification is possible, this function returns zero.
   Otherwise, it returns either const_true_rtx or const0_rtx.  */

rtx
simplify_const_relational_operation (enum rtx_code code,
                                     enum machine_mode mode,
                                     rtx op0, rtx op1)
{
  rtx tem;
  rtx trueop0;
  rtx trueop1;

  gcc_assert (mode != VOIDmode
              || (GET_MODE (op0) == VOIDmode
                  && GET_MODE (op1) == VOIDmode));

  /* If op0 is a compare, extract the comparison arguments from it.  */
  if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
    {
      op1 = XEXP (op0, 1);
      op0 = XEXP (op0, 0);

      if (GET_MODE (op0) != VOIDmode)
        mode = GET_MODE (op0);
      else if (GET_MODE (op1) != VOIDmode)
        mode = GET_MODE (op1);
      else
        return 0;
    }

  /* We can't simplify MODE_CC values since we don't know what the
     actual comparison is.  */
  if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC || CC0_P (op0))
    return 0;

  /* Make sure the constant is second.  */
  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0, op0 = op1, op1 = tem;
      code = swap_condition (code);
    }

  trueop0 = avoid_constant_pool_reference (op0);
  trueop1 = avoid_constant_pool_reference (op1);
 
  /* For integer comparisons of A and B maybe we can simplify A - B and can
     then simplify a comparison of that with zero.  If A and B are both either
     a register or a CONST_INT, this can't help; testing for these cases will
     prevent infinite recursion here and speed things up.

     We can only do this for EQ and NE comparisons, as otherwise we may
     lose or introduce overflow, which we cannot disregard as undefined
     since we do not know the signedness of the operation on either the
     left or the right hand side of the comparison.  */
 
4297
  if (INTEGRAL_MODE_P (mode) && trueop1 != const0_rtx
4298
      && (code == EQ || code == NE)
4299
      && ! ((REG_P (op0) || CONST_INT_P (trueop0))
4300
            && (REG_P (op1) || CONST_INT_P (trueop1)))
4301
      && 0 != (tem = simplify_binary_operation (MINUS, mode, op0, op1))
4302
      /* We cannot do this if tem is a nonzero address.  */
4303
      && ! nonzero_address_p (tem))
4304
    return simplify_const_relational_operation (signed_condition (code),
4305
                                                mode, tem, const0_rtx);
4306
 
4307
  if (! HONOR_NANS (mode) && code == ORDERED)
4308
    return const_true_rtx;
4309
 
4310
  if (! HONOR_NANS (mode) && code == UNORDERED)
4311
    return const0_rtx;
4312
 
4313
  /* For modes without NaNs, if the two operands are equal, we know the
4314
     result except if they have side-effects.  Even with NaNs we know
4315
     the result of unordered comparisons and, if signaling NaNs are
4316
     irrelevant, also the result of LT/GT/LTGT.  */
4317
  if ((! HONOR_NANS (GET_MODE (trueop0))
4318
       || code == UNEQ || code == UNLE || code == UNGE
4319
       || ((code == LT || code == GT || code == LTGT)
4320
           && ! HONOR_SNANS (GET_MODE (trueop0))))
4321
      && rtx_equal_p (trueop0, trueop1)
4322
      && ! side_effects_p (trueop0))
4323
    return comparison_result (code, CMP_EQ);
4324
 
4325
  /* If the operands are floating-point constants, see if we can fold
4326
     the result.  */
4327
  if (GET_CODE (trueop0) == CONST_DOUBLE
4328
      && GET_CODE (trueop1) == CONST_DOUBLE
4329
      && SCALAR_FLOAT_MODE_P (GET_MODE (trueop0)))
4330
    {
4331
      REAL_VALUE_TYPE d0, d1;
4332
 
4333
      REAL_VALUE_FROM_CONST_DOUBLE (d0, trueop0);
4334
      REAL_VALUE_FROM_CONST_DOUBLE (d1, trueop1);
4335
 
4336
      /* Comparisons are unordered iff at least one of the values is NaN.  */
4337
      if (REAL_VALUE_ISNAN (d0) || REAL_VALUE_ISNAN (d1))
4338
        switch (code)
4339
          {
4340
          case UNEQ:
4341
          case UNLT:
4342
          case UNGT:
4343
          case UNLE:
4344
          case UNGE:
4345
          case NE:
4346
          case UNORDERED:
4347
            return const_true_rtx;
4348
          case EQ:
4349
          case LT:
4350
          case GT:
4351
          case LE:
4352
          case GE:
4353
          case LTGT:
4354
          case ORDERED:
4355
            return const0_rtx;
4356
          default:
4357
            return 0;
4358
          }
4359
 
4360
      return comparison_result (code,
4361
                                (REAL_VALUES_EQUAL (d0, d1) ? CMP_EQ :
4362
                                 REAL_VALUES_LESS (d0, d1) ? CMP_LT : CMP_GT));
4363
    }
4364
 
4365
  /* Otherwise, see if the operands are both integers.  */
4366
  if ((GET_MODE_CLASS (mode) == MODE_INT || mode == VOIDmode)
4367
       && (GET_CODE (trueop0) == CONST_DOUBLE
4368
           || CONST_INT_P (trueop0))
4369
       && (GET_CODE (trueop1) == CONST_DOUBLE
4370
           || CONST_INT_P (trueop1)))
4371
    {
4372
      int width = GET_MODE_BITSIZE (mode);
4373
      HOST_WIDE_INT l0s, h0s, l1s, h1s;
4374
      unsigned HOST_WIDE_INT l0u, h0u, l1u, h1u;
4375
 
4376
      /* Get the two words comprising each integer constant.  */
4377
      if (GET_CODE (trueop0) == CONST_DOUBLE)
4378
        {
4379
          l0u = l0s = CONST_DOUBLE_LOW (trueop0);
4380
          h0u = h0s = CONST_DOUBLE_HIGH (trueop0);
4381
        }
4382
      else
4383
        {
4384
          l0u = l0s = INTVAL (trueop0);
4385
          h0u = h0s = HWI_SIGN_EXTEND (l0s);
4386
        }
4387
 
4388
      if (GET_CODE (trueop1) == CONST_DOUBLE)
4389
        {
4390
          l1u = l1s = CONST_DOUBLE_LOW (trueop1);
4391
          h1u = h1s = CONST_DOUBLE_HIGH (trueop1);
4392
        }
4393
      else
4394
        {
4395
          l1u = l1s = INTVAL (trueop1);
4396
          h1u = h1s = HWI_SIGN_EXTEND (l1s);
4397
        }
4398
 
4399
      /* If WIDTH is nonzero and smaller than HOST_BITS_PER_WIDE_INT,
4400
         we have to sign or zero-extend the values.  */
4401
      if (width != 0 && width < HOST_BITS_PER_WIDE_INT)
4402
        {
4403
          l0u &= ((HOST_WIDE_INT) 1 << width) - 1;
4404
          l1u &= ((HOST_WIDE_INT) 1 << width) - 1;
4405
 
4406
          if (l0s & ((HOST_WIDE_INT) 1 << (width - 1)))
4407
            l0s |= ((HOST_WIDE_INT) (-1) << width);
4408
 
4409
          if (l1s & ((HOST_WIDE_INT) 1 << (width - 1)))
4410
            l1s |= ((HOST_WIDE_INT) (-1) << width);
4411
        }
4412
      if (width != 0 && width <= HOST_BITS_PER_WIDE_INT)
4413
        h0u = h1u = 0, h0s = HWI_SIGN_EXTEND (l0s), h1s = HWI_SIGN_EXTEND (l1s);
4414
 
4415
      if (h0u == h1u && l0u == l1u)
4416
        return comparison_result (code, CMP_EQ);
4417
      else
4418
        {
4419
          int cr;
4420
          cr = (h0s < h1s || (h0s == h1s && l0u < l1u)) ? CMP_LT : CMP_GT;
4421
          cr |= (h0u < h1u || (h0u == h1u && l0u < l1u)) ? CMP_LTU : CMP_GTU;
4422
          return comparison_result (code, cr);
4423
        }
4424
    }
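
  /* For illustration: with both operands constant, the code above folds,
     e.g.,

         (eq:SI (const_int 7) (const_int 7))    --> const_true_rtx
         (ltu:SI (const_int -1) (const_int 1))  --> const0_rtx

     the second because, masked to the mode width, -1 is the all-ones
     value and hence maximal under an unsigned interpretation.  */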

  /* Optimize comparisons with upper and lower bounds.  */
  if (SCALAR_INT_MODE_P (mode)
      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
      && CONST_INT_P (trueop1))
    {
      int sign;
      unsigned HOST_WIDE_INT nonzero = nonzero_bits (trueop0, mode);
      HOST_WIDE_INT val = INTVAL (trueop1);
      HOST_WIDE_INT mmin, mmax;

      if (code == GEU
          || code == LEU
          || code == GTU
          || code == LTU)
        sign = 0;
      else
        sign = 1;

      /* Get a reduced range if the sign bit is zero.  */
      if (nonzero <= (GET_MODE_MASK (mode) >> 1))
        {
          mmin = 0;
          mmax = nonzero;
        }
      else
        {
          rtx mmin_rtx, mmax_rtx;
          get_mode_bounds (mode, sign, mode, &mmin_rtx, &mmax_rtx);

          mmin = INTVAL (mmin_rtx);
          mmax = INTVAL (mmax_rtx);
          if (sign)
            {
              unsigned int sign_copies = num_sign_bit_copies (trueop0, mode);

              mmin >>= (sign_copies - 1);
              mmax >>= (sign_copies - 1);
            }
        }

      switch (code)
        {
        /* x >= y is always true for y <= mmin, always false for y > mmax.  */
        case GEU:
          if ((unsigned HOST_WIDE_INT) val <= (unsigned HOST_WIDE_INT) mmin)
            return const_true_rtx;
          if ((unsigned HOST_WIDE_INT) val > (unsigned HOST_WIDE_INT) mmax)
            return const0_rtx;
          break;
        case GE:
          if (val <= mmin)
            return const_true_rtx;
          if (val > mmax)
            return const0_rtx;
          break;

        /* x <= y is always true for y >= mmax, always false for y < mmin.  */
        case LEU:
          if ((unsigned HOST_WIDE_INT) val >= (unsigned HOST_WIDE_INT) mmax)
            return const_true_rtx;
          if ((unsigned HOST_WIDE_INT) val < (unsigned HOST_WIDE_INT) mmin)
            return const0_rtx;
          break;
        case LE:
          if (val >= mmax)
            return const_true_rtx;
          if (val < mmin)
            return const0_rtx;
          break;

        case EQ:
          /* x == y is always false for y out of range.  */
          if (val < mmin || val > mmax)
            return const0_rtx;
          break;

        /* x > y is always false for y >= mmax, always true for y < mmin.  */
        case GTU:
          if ((unsigned HOST_WIDE_INT) val >= (unsigned HOST_WIDE_INT) mmax)
            return const0_rtx;
          if ((unsigned HOST_WIDE_INT) val < (unsigned HOST_WIDE_INT) mmin)
            return const_true_rtx;
          break;
        case GT:
          if (val >= mmax)
            return const0_rtx;
          if (val < mmin)
            return const_true_rtx;
          break;

        /* x < y is always false for y <= mmin, always true for y > mmax.  */
        case LTU:
          if ((unsigned HOST_WIDE_INT) val <= (unsigned HOST_WIDE_INT) mmin)
            return const0_rtx;
          if ((unsigned HOST_WIDE_INT) val > (unsigned HOST_WIDE_INT) mmax)
            return const_true_rtx;
          break;
        case LT:
          if (val <= mmin)
            return const0_rtx;
          if (val > mmax)
            return const_true_rtx;
          break;

        case NE:
          /* x != y is always true for y out of range.  */
          if (val < mmin || val > mmax)
            return const_true_rtx;
          break;

        default:
          break;
        }
    }
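
  /* For example, if TRUEOP0 is (zero_extend:SI (reg:QI X)), nonzero_bits
     yields 0xff, so the deduced range is [0, 255] and

         (gtu:SI (zero_extend:SI (reg:QI X)) (const_int 255))

     folds to const0_rtx, while the corresponding LEU comparison folds
     to const_true_rtx.  */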

  /* Optimize integer comparisons with zero.  */
  if (trueop1 == const0_rtx)
    {
      /* Some addresses are known to be nonzero.  We don't know
         their sign, but equality comparisons are known.  */
      if (nonzero_address_p (trueop0))
        {
          if (code == EQ || code == LEU)
            return const0_rtx;
          if (code == NE || code == GTU)
            return const_true_rtx;
        }

      /* See if the first operand is an IOR with a constant.  If so, we
         may be able to determine the result of this comparison.  */
      if (GET_CODE (op0) == IOR)
        {
          rtx inner_const = avoid_constant_pool_reference (XEXP (op0, 1));
          if (CONST_INT_P (inner_const) && inner_const != const0_rtx)
            {
              int sign_bitnum = GET_MODE_BITSIZE (mode) - 1;
              int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
                              && (INTVAL (inner_const)
                                  & ((HOST_WIDE_INT) 1 << sign_bitnum)));

              switch (code)
                {
                case EQ:
                case LEU:
                  return const0_rtx;
                case NE:
                case GTU:
                  return const_true_rtx;
                case LT:
                case LE:
                  if (has_sign)
                    return const_true_rtx;
                  break;
                case GT:
                case GE:
                  if (has_sign)
                    return const0_rtx;
                  break;
                default:
                  break;
                }
            }
        }
    }
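
  /* For example, since an IOR with a nonzero constant is itself nonzero,

         (ne:SI (ior:SI (reg:SI X) (const_int 4)) (const_int 0))

     folds to const_true_rtx, and the corresponding EQ comparison folds
     to const0_rtx.  */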

  /* Optimize comparison of ABS with zero.  */
  if (trueop1 == CONST0_RTX (mode)
      && (GET_CODE (trueop0) == ABS
          || (GET_CODE (trueop0) == FLOAT_EXTEND
              && GET_CODE (XEXP (trueop0, 0)) == ABS)))
    {
      switch (code)
        {
        case LT:
          /* Optimize abs(x) < 0.0.  */
          if (!HONOR_SNANS (mode)
              && (!INTEGRAL_MODE_P (mode)
                  || (!flag_wrapv && !flag_trapv && flag_strict_overflow)))
            {
              if (INTEGRAL_MODE_P (mode)
                  && (issue_strict_overflow_warning
                      (WARN_STRICT_OVERFLOW_CONDITIONAL)))
                warning (OPT_Wstrict_overflow,
                         ("assuming signed overflow does not occur when "
                          "assuming abs (x) < 0 is false"));
              return const0_rtx;
            }
          break;

        case GE:
          /* Optimize abs(x) >= 0.0.  */
          if (!HONOR_NANS (mode)
              && (!INTEGRAL_MODE_P (mode)
                  || (!flag_wrapv && !flag_trapv && flag_strict_overflow)))
            {
              if (INTEGRAL_MODE_P (mode)
                  && (issue_strict_overflow_warning
                      (WARN_STRICT_OVERFLOW_CONDITIONAL)))
                warning (OPT_Wstrict_overflow,
                         ("assuming signed overflow does not occur when "
                          "assuming abs (x) >= 0 is true"));
              return const_true_rtx;
            }
          break;

        case UNGE:
          /* Optimize ! (abs(x) < 0.0).  */
          return const_true_rtx;

        default:
          break;
        }
    }
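
  /* Concretely, when NaNs need not be honored,

         (ge:DF (abs:DF (reg:DF X)) (const_double:DF 0.0))

     folds to const_true_rtx; the corresponding LT comparison folds to
     const0_rtx (for integer modes, only when signed overflow may be
     assumed not to occur).  */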

  return 0;
}

/* Simplify CODE, an operation with result mode MODE and three operands,
   OP0, OP1, and OP2.  OP0_MODE was the mode of OP0 before it became
   a constant.  Return 0 if no simplification is possible.  */

rtx
simplify_ternary_operation (enum rtx_code code, enum machine_mode mode,
                            enum machine_mode op0_mode, rtx op0, rtx op1,
                            rtx op2)
{
  unsigned int width = GET_MODE_BITSIZE (mode);

  /* VOIDmode means "infinite" precision.  */
  if (width == 0)
    width = HOST_BITS_PER_WIDE_INT;

  switch (code)
    {
    case SIGN_EXTRACT:
    case ZERO_EXTRACT:
      if (CONST_INT_P (op0)
          && CONST_INT_P (op1)
          && CONST_INT_P (op2)
          && ((unsigned) INTVAL (op1) + (unsigned) INTVAL (op2) <= width)
          && width <= (unsigned) HOST_BITS_PER_WIDE_INT)
        {
          /* Extracting a bit-field from a constant.  */
          HOST_WIDE_INT val = INTVAL (op0);

          if (BITS_BIG_ENDIAN)
            val >>= (GET_MODE_BITSIZE (op0_mode)
                     - INTVAL (op2) - INTVAL (op1));
          else
            val >>= INTVAL (op2);

          if (HOST_BITS_PER_WIDE_INT != INTVAL (op1))
            {
              /* First zero-extend.  */
              val &= ((HOST_WIDE_INT) 1 << INTVAL (op1)) - 1;
              /* If desired, propagate sign bit.  */
              if (code == SIGN_EXTRACT
                  && (val & ((HOST_WIDE_INT) 1 << (INTVAL (op1) - 1))))
                val |= ~ (((HOST_WIDE_INT) 1 << INTVAL (op1)) - 1);
            }

          /* Clear the bits that don't belong in our mode,
             unless they and our sign bit are all one.
             So we get either a reasonable negative value or a reasonable
             unsigned value for this mode.  */
          if (width < HOST_BITS_PER_WIDE_INT
              && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
                  != ((HOST_WIDE_INT) (-1) << (width - 1))))
            val &= ((HOST_WIDE_INT) 1 << width) - 1;

          return gen_int_mode (val, mode);
        }
      break;

    case IF_THEN_ELSE:
      if (CONST_INT_P (op0))
        return op0 != const0_rtx ? op1 : op2;

      /* Convert c ? a : a into "a".  */
      if (rtx_equal_p (op1, op2) && ! side_effects_p (op0))
        return op1;

      /* Convert a != b ? a : b into "a".  */
      if (GET_CODE (op0) == NE
          && ! side_effects_p (op0)
          && ! HONOR_NANS (mode)
          && ! HONOR_SIGNED_ZEROS (mode)
          && ((rtx_equal_p (XEXP (op0, 0), op1)
               && rtx_equal_p (XEXP (op0, 1), op2))
              || (rtx_equal_p (XEXP (op0, 0), op2)
                  && rtx_equal_p (XEXP (op0, 1), op1))))
        return op1;

      /* Convert a == b ? a : b into "b".  */
      if (GET_CODE (op0) == EQ
          && ! side_effects_p (op0)
          && ! HONOR_NANS (mode)
          && ! HONOR_SIGNED_ZEROS (mode)
          && ((rtx_equal_p (XEXP (op0, 0), op1)
               && rtx_equal_p (XEXP (op0, 1), op2))
              || (rtx_equal_p (XEXP (op0, 0), op2)
                  && rtx_equal_p (XEXP (op0, 1), op1))))
        return op2;

      if (COMPARISON_P (op0) && ! side_effects_p (op0))
        {
          enum machine_mode cmp_mode = (GET_MODE (XEXP (op0, 0)) == VOIDmode
                                        ? GET_MODE (XEXP (op0, 1))
                                        : GET_MODE (XEXP (op0, 0)));
          rtx temp;

          /* Look for happy constants in op1 and op2.  */
          if (CONST_INT_P (op1) && CONST_INT_P (op2))
            {
              HOST_WIDE_INT t = INTVAL (op1);
              HOST_WIDE_INT f = INTVAL (op2);

              if (t == STORE_FLAG_VALUE && f == 0)
                code = GET_CODE (op0);
              else if (t == 0 && f == STORE_FLAG_VALUE)
                {
                  enum rtx_code tmp;
                  tmp = reversed_comparison_code (op0, NULL_RTX);
                  if (tmp == UNKNOWN)
                    break;
                  code = tmp;
                }
              else
                break;

              return simplify_gen_relational (code, mode, cmp_mode,
                                              XEXP (op0, 0), XEXP (op0, 1));
            }

          if (cmp_mode == VOIDmode)
            cmp_mode = op0_mode;
          temp = simplify_relational_operation (GET_CODE (op0), op0_mode,
                                                cmp_mode, XEXP (op0, 0),
                                                XEXP (op0, 1));

          /* See if any simplifications were possible.  */
          if (temp)
            {
              if (CONST_INT_P (temp))
                return temp == const0_rtx ? op2 : op1;
              else if (temp)
                return gen_rtx_IF_THEN_ELSE (mode, temp, op1, op2);
            }
        }
      break;

    case VEC_MERGE:
      gcc_assert (GET_MODE (op0) == mode);
      gcc_assert (GET_MODE (op1) == mode);
      gcc_assert (VECTOR_MODE_P (mode));
      op2 = avoid_constant_pool_reference (op2);
      if (CONST_INT_P (op2))
        {
          int elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode));
          unsigned n_elts = (GET_MODE_SIZE (mode) / elt_size);
          int mask = (1 << n_elts) - 1;

          if (!(INTVAL (op2) & mask))
            return op1;
          if ((INTVAL (op2) & mask) == mask)
            return op0;

          op0 = avoid_constant_pool_reference (op0);
          op1 = avoid_constant_pool_reference (op1);
          if (GET_CODE (op0) == CONST_VECTOR
              && GET_CODE (op1) == CONST_VECTOR)
            {
              rtvec v = rtvec_alloc (n_elts);
              unsigned int i;

              for (i = 0; i < n_elts; i++)
                RTVEC_ELT (v, i) = (INTVAL (op2) & (1 << i)
                                    ? CONST_VECTOR_ELT (op0, i)
                                    : CONST_VECTOR_ELT (op1, i));
              return gen_rtx_CONST_VECTOR (mode, v);
            }
        }
      break;

    default:
      gcc_unreachable ();
    }

  return 0;
}
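
/* Two illustrative IF_THEN_ELSE folds performed above, assuming a
   target where STORE_FLAG_VALUE == 1:

       (if_then_else (const_int 1) A B)                    --> A
       (if_then_else (ne X Y) (const_int 1) (const_int 0)) --> (ne X Y)

   the latter via the "happy constants" case, which re-expresses the
   conditional directly as its comparison.  */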

/* Evaluate a SUBREG of a CONST_INT or CONST_DOUBLE or CONST_FIXED
   or CONST_VECTOR,
   returning another CONST_INT or CONST_DOUBLE or CONST_FIXED or CONST_VECTOR.

   Works by unpacking OP into a collection of 8-bit values
   represented as a little-endian array of 'unsigned char', selecting by BYTE,
   and then repacking them again for OUTERMODE.  */

static rtx
simplify_immed_subreg (enum machine_mode outermode, rtx op,
                       enum machine_mode innermode, unsigned int byte)
{
  /* We support up to 512-bit values (for V8DFmode).  */
  enum {
    max_bitsize = 512,
    value_bit = 8,
    value_mask = (1 << value_bit) - 1
  };
  unsigned char value[max_bitsize / value_bit];
  int value_start;
  int i;
  int elem;

  int num_elem;
  rtx * elems;
  int elem_bitsize;
  rtx result_s;
  rtvec result_v = NULL;
  enum mode_class outer_class;
  enum machine_mode outer_submode;

  /* Some ports misuse CCmode.  */
  if (GET_MODE_CLASS (outermode) == MODE_CC && CONST_INT_P (op))
    return op;

  /* We have no way to represent a complex constant at the rtl level.  */
  if (COMPLEX_MODE_P (outermode))
    return NULL_RTX;

  /* Unpack the value.  */

  if (GET_CODE (op) == CONST_VECTOR)
    {
      num_elem = CONST_VECTOR_NUNITS (op);
      elems = &CONST_VECTOR_ELT (op, 0);
      elem_bitsize = GET_MODE_BITSIZE (GET_MODE_INNER (innermode));
    }
  else
    {
      num_elem = 1;
      elems = &op;
      elem_bitsize = max_bitsize;
    }
  /* If this asserts, it is too complicated; reducing value_bit may help.  */
  gcc_assert (BITS_PER_UNIT % value_bit == 0);
  /* I don't know how to handle endianness of sub-units.  */
  gcc_assert (elem_bitsize % BITS_PER_UNIT == 0);

  for (elem = 0; elem < num_elem; elem++)
    {
      unsigned char * vp;
      rtx el = elems[elem];

      /* Vectors are kept in target memory order.  (This is probably
         a mistake.)  */
      {
        unsigned byte = (elem * elem_bitsize) / BITS_PER_UNIT;
        unsigned ibyte = (((num_elem - 1 - elem) * elem_bitsize)
                          / BITS_PER_UNIT);
        unsigned word_byte = WORDS_BIG_ENDIAN ? ibyte : byte;
        unsigned subword_byte = BYTES_BIG_ENDIAN ? ibyte : byte;
        unsigned bytele = (subword_byte % UNITS_PER_WORD
                         + (word_byte / UNITS_PER_WORD) * UNITS_PER_WORD);
        vp = value + (bytele * BITS_PER_UNIT) / value_bit;
      }

      switch (GET_CODE (el))
        {
        case CONST_INT:
          for (i = 0;
               i < HOST_BITS_PER_WIDE_INT && i < elem_bitsize;
               i += value_bit)
            *vp++ = INTVAL (el) >> i;
          /* CONST_INTs are always logically sign-extended.  */
          for (; i < elem_bitsize; i += value_bit)
            *vp++ = INTVAL (el) < 0 ? -1 : 0;
          break;

        case CONST_DOUBLE:
          if (GET_MODE (el) == VOIDmode)
            {
              /* If this triggers, someone should have generated a
                 CONST_INT instead.  */
              gcc_assert (elem_bitsize > HOST_BITS_PER_WIDE_INT);

              for (i = 0; i < HOST_BITS_PER_WIDE_INT; i += value_bit)
                *vp++ = CONST_DOUBLE_LOW (el) >> i;
              while (i < HOST_BITS_PER_WIDE_INT * 2 && i < elem_bitsize)
                {
                  *vp++
                    = CONST_DOUBLE_HIGH (el) >> (i - HOST_BITS_PER_WIDE_INT);
                  i += value_bit;
                }
              /* It shouldn't matter what's done here, so fill it with
                 zero.  */
              for (; i < elem_bitsize; i += value_bit)
                *vp++ = 0;
            }
          else
            {
              long tmp[max_bitsize / 32];
              int bitsize = GET_MODE_BITSIZE (GET_MODE (el));

              gcc_assert (SCALAR_FLOAT_MODE_P (GET_MODE (el)));
              gcc_assert (bitsize <= elem_bitsize);
              gcc_assert (bitsize % value_bit == 0);

              real_to_target (tmp, CONST_DOUBLE_REAL_VALUE (el),
                              GET_MODE (el));

              /* real_to_target produces its result in words affected by
                 FLOAT_WORDS_BIG_ENDIAN.  However, we ignore this,
                 and use WORDS_BIG_ENDIAN instead; see the documentation
                 of SUBREG in rtl.texi.  */
              for (i = 0; i < bitsize; i += value_bit)
                {
                  int ibase;
                  if (WORDS_BIG_ENDIAN)
                    ibase = bitsize - 1 - i;
                  else
                    ibase = i;
                  *vp++ = tmp[ibase / 32] >> i % 32;
                }

              /* It shouldn't matter what's done here, so fill it with
                 zero.  */
              for (; i < elem_bitsize; i += value_bit)
                *vp++ = 0;
            }
          break;

        case CONST_FIXED:
          if (elem_bitsize <= HOST_BITS_PER_WIDE_INT)
            {
              for (i = 0; i < elem_bitsize; i += value_bit)
                *vp++ = CONST_FIXED_VALUE_LOW (el) >> i;
            }
          else
            {
              for (i = 0; i < HOST_BITS_PER_WIDE_INT; i += value_bit)
                *vp++ = CONST_FIXED_VALUE_LOW (el) >> i;
              for (; i < 2 * HOST_BITS_PER_WIDE_INT && i < elem_bitsize;
                   i += value_bit)
                *vp++ = CONST_FIXED_VALUE_HIGH (el)
                        >> (i - HOST_BITS_PER_WIDE_INT);
              for (; i < elem_bitsize; i += value_bit)
                *vp++ = 0;
            }
          break;

        default:
          gcc_unreachable ();
        }
    }

  /* Now, pick the right byte to start with.  */
  /* Renumber BYTE so that the least-significant byte is byte 0.  A special
     case is paradoxical SUBREGs, which shouldn't be adjusted since they
     will already have offset 0.  */
  if (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode))
    {
      unsigned ibyte = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode)
                        - byte);
      unsigned word_byte = WORDS_BIG_ENDIAN ? ibyte : byte;
      unsigned subword_byte = BYTES_BIG_ENDIAN ? ibyte : byte;
      byte = (subword_byte % UNITS_PER_WORD
              + (word_byte / UNITS_PER_WORD) * UNITS_PER_WORD);
    }

  /* BYTE should still be inside OP.  (Note that BYTE is unsigned,
     so if it's become negative it will instead be very large.)  */
  gcc_assert (byte < GET_MODE_SIZE (innermode));

  /* Convert from bytes to chunks of size value_bit.  */
  value_start = byte * (BITS_PER_UNIT / value_bit);

  /* Re-pack the value.  */

  if (VECTOR_MODE_P (outermode))
    {
      num_elem = GET_MODE_NUNITS (outermode);
      result_v = rtvec_alloc (num_elem);
      elems = &RTVEC_ELT (result_v, 0);
      outer_submode = GET_MODE_INNER (outermode);
    }
  else
    {
      num_elem = 1;
      elems = &result_s;
      outer_submode = outermode;
    }

  outer_class = GET_MODE_CLASS (outer_submode);
  elem_bitsize = GET_MODE_BITSIZE (outer_submode);

  gcc_assert (elem_bitsize % value_bit == 0);
  gcc_assert (elem_bitsize + value_start * value_bit <= max_bitsize);

  for (elem = 0; elem < num_elem; elem++)
    {
      unsigned char *vp;

      /* Vectors are stored in target memory order.  (This is probably
         a mistake.)  */
      {
        unsigned byte = (elem * elem_bitsize) / BITS_PER_UNIT;
        unsigned ibyte = (((num_elem - 1 - elem) * elem_bitsize)
                          / BITS_PER_UNIT);
        unsigned word_byte = WORDS_BIG_ENDIAN ? ibyte : byte;
        unsigned subword_byte = BYTES_BIG_ENDIAN ? ibyte : byte;
        unsigned bytele = (subword_byte % UNITS_PER_WORD
                         + (word_byte / UNITS_PER_WORD) * UNITS_PER_WORD);
        vp = value + value_start + (bytele * BITS_PER_UNIT) / value_bit;
      }

      switch (outer_class)
        {
        case MODE_INT:
        case MODE_PARTIAL_INT:
          {
            unsigned HOST_WIDE_INT hi = 0, lo = 0;

            for (i = 0;
                 i < HOST_BITS_PER_WIDE_INT && i < elem_bitsize;
                 i += value_bit)
              lo |= (HOST_WIDE_INT)(*vp++ & value_mask) << i;
            for (; i < elem_bitsize; i += value_bit)
              hi |= ((HOST_WIDE_INT)(*vp++ & value_mask)
                     << (i - HOST_BITS_PER_WIDE_INT));

            /* immed_double_const doesn't call trunc_int_for_mode.  I don't
               know why.  */
            if (elem_bitsize <= HOST_BITS_PER_WIDE_INT)
              elems[elem] = gen_int_mode (lo, outer_submode);
            else if (elem_bitsize <= 2 * HOST_BITS_PER_WIDE_INT)
              elems[elem] = immed_double_const (lo, hi, outer_submode);
            else
              return NULL_RTX;
          }
          break;

        case MODE_FLOAT:
        case MODE_DECIMAL_FLOAT:
          {
            REAL_VALUE_TYPE r;
            long tmp[max_bitsize / 32];

            /* real_from_target wants its input in words affected by
               FLOAT_WORDS_BIG_ENDIAN.  However, we ignore this,
               and use WORDS_BIG_ENDIAN instead; see the documentation
               of SUBREG in rtl.texi.  */
            for (i = 0; i < max_bitsize / 32; i++)
              tmp[i] = 0;
            for (i = 0; i < elem_bitsize; i += value_bit)
              {
                int ibase;
                if (WORDS_BIG_ENDIAN)
                  ibase = elem_bitsize - 1 - i;
                else
                  ibase = i;
                tmp[ibase / 32] |= (*vp++ & value_mask) << i % 32;
              }

            real_from_target (&r, tmp, outer_submode);
            elems[elem] = CONST_DOUBLE_FROM_REAL_VALUE (r, outer_submode);
          }
          break;

        case MODE_FRACT:
        case MODE_UFRACT:
        case MODE_ACCUM:
        case MODE_UACCUM:
          {
            FIXED_VALUE_TYPE f;
            f.data.low = 0;
            f.data.high = 0;
            f.mode = outer_submode;

            for (i = 0;
                 i < HOST_BITS_PER_WIDE_INT && i < elem_bitsize;
                 i += value_bit)
              f.data.low |= (HOST_WIDE_INT)(*vp++ & value_mask) << i;
            for (; i < elem_bitsize; i += value_bit)
              f.data.high |= ((HOST_WIDE_INT)(*vp++ & value_mask)
                             << (i - HOST_BITS_PER_WIDE_INT));

            elems[elem] = CONST_FIXED_FROM_FIXED_VALUE (f, outer_submode);
          }
          break;

        default:
          gcc_unreachable ();
        }
    }
  if (VECTOR_MODE_P (outermode))
    return gen_rtx_CONST_VECTOR (outermode, result_v);
  else
    return result_s;
}
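
/* For example, on a little-endian target a request equivalent to

       simplify_immed_subreg (QImode, GEN_INT (0x1234), HImode, 0)

   unpacks the HImode constant into the byte array {0x34, 0x12} and
   repacks byte 0, yielding (const_int 0x34); with BYTE == 1 it would
   yield (const_int 0x12) instead.  */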

/* Simplify SUBREG:OUTERMODE(OP:INNERMODE, BYTE)
   Return 0 if no simplifications are possible.  */
rtx
simplify_subreg (enum machine_mode outermode, rtx op,
                 enum machine_mode innermode, unsigned int byte)
{
  /* Little bit of sanity checking.  */
  gcc_assert (innermode != VOIDmode);
  gcc_assert (outermode != VOIDmode);
  gcc_assert (innermode != BLKmode);
  gcc_assert (outermode != BLKmode);

  gcc_assert (GET_MODE (op) == innermode
              || GET_MODE (op) == VOIDmode);

  gcc_assert ((byte % GET_MODE_SIZE (outermode)) == 0);
  gcc_assert (byte < GET_MODE_SIZE (innermode));

  if (outermode == innermode && !byte)
    return op;

  if (CONST_INT_P (op)
      || GET_CODE (op) == CONST_DOUBLE
      || GET_CODE (op) == CONST_FIXED
      || GET_CODE (op) == CONST_VECTOR)
    return simplify_immed_subreg (outermode, op, innermode, byte);

  /* Changing mode twice with SUBREG => just change it once,
     or not at all if changing back to OP's starting mode.  */
  if (GET_CODE (op) == SUBREG)
    {
      enum machine_mode innermostmode = GET_MODE (SUBREG_REG (op));
      int final_offset = byte + SUBREG_BYTE (op);
      rtx newx;

      if (outermode == innermostmode
          && byte == 0 && SUBREG_BYTE (op) == 0)
        return SUBREG_REG (op);

      /* The SUBREG_BYTE represents offset, as if the value were stored
         in memory.  An irritating exception is the paradoxical subreg,
         where we define SUBREG_BYTE to be 0.  On big endian machines,
         this value should be negative.  For a moment, undo this
         exception.  */
      if (byte == 0 && GET_MODE_SIZE (innermode) < GET_MODE_SIZE (outermode))
        {
          int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
          if (WORDS_BIG_ENDIAN)
            final_offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
          if (BYTES_BIG_ENDIAN)
            final_offset += difference % UNITS_PER_WORD;
        }
      if (SUBREG_BYTE (op) == 0
          && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
        {
          int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
          if (WORDS_BIG_ENDIAN)
            final_offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
          if (BYTES_BIG_ENDIAN)
            final_offset += difference % UNITS_PER_WORD;
        }

      /* See whether the resulting subreg will be paradoxical.  */
      if (GET_MODE_SIZE (innermostmode) > GET_MODE_SIZE (outermode))
        {
          /* In nonparadoxical subregs we can't handle negative offsets.  */
          if (final_offset < 0)
            return NULL_RTX;
          /* Bail out in case the resulting subreg would be incorrect.  */
          if (final_offset % GET_MODE_SIZE (outermode)
              || (unsigned) final_offset >= GET_MODE_SIZE (innermostmode))
            return NULL_RTX;
        }
      else
        {
          int offset = 0;
          int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (outermode));

          /* For a paradoxical subreg, see if we are still looking at the
             lower part.  If so, our SUBREG_BYTE will be 0.  */
          if (WORDS_BIG_ENDIAN)
            offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
          if (BYTES_BIG_ENDIAN)
            offset += difference % UNITS_PER_WORD;
          if (offset == final_offset)
            final_offset = 0;
          else
            return NULL_RTX;
        }

      /* Recurse for further possible simplifications.  */
      newx = simplify_subreg (outermode, SUBREG_REG (op), innermostmode,
                              final_offset);
      if (newx)
        return newx;
      if (validate_subreg (outermode, innermostmode,
                           SUBREG_REG (op), final_offset))
        {
          newx = gen_rtx_SUBREG (outermode, SUBREG_REG (op), final_offset);
          if (SUBREG_PROMOTED_VAR_P (op)
              && SUBREG_PROMOTED_UNSIGNED_P (op) >= 0
              && GET_MODE_CLASS (outermode) == MODE_INT
              && IN_RANGE (GET_MODE_SIZE (outermode),
                           GET_MODE_SIZE (innermode),
                           GET_MODE_SIZE (innermostmode))
              && subreg_lowpart_p (newx))
            {
              SUBREG_PROMOTED_VAR_P (newx) = 1;
              SUBREG_PROMOTED_UNSIGNED_SET
                (newx, SUBREG_PROMOTED_UNSIGNED_P (op));
            }
          return newx;
        }
      return NULL_RTX;
    }

  /* Merge implicit and explicit truncations.  */

  if (GET_CODE (op) == TRUNCATE
      && GET_MODE_SIZE (outermode) < GET_MODE_SIZE (innermode)
      && subreg_lowpart_offset (outermode, innermode) == byte)
    return simplify_gen_unary (TRUNCATE, outermode, XEXP (op, 0),
                               GET_MODE (XEXP (op, 0)));

  /* SUBREG of a hard register => just change the register number
     and/or mode.  If the hard register is not valid in that mode,
     suppress this simplification.  If the hard register is the stack,
     frame, or argument pointer, leave this as a SUBREG.  */

  if (REG_P (op) && HARD_REGISTER_P (op))
    {
      unsigned int regno, final_regno;

      regno = REGNO (op);
      final_regno = simplify_subreg_regno (regno, innermode, byte, outermode);
      if (HARD_REGISTER_NUM_P (final_regno))
        {
          rtx x;
          int final_offset = byte;

          /* Adjust offset for paradoxical subregs.  */
          if (byte == 0
              && GET_MODE_SIZE (innermode) < GET_MODE_SIZE (outermode))
            {
              int difference = (GET_MODE_SIZE (innermode)
                                - GET_MODE_SIZE (outermode));
              if (WORDS_BIG_ENDIAN)
                final_offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
              if (BYTES_BIG_ENDIAN)
                final_offset += difference % UNITS_PER_WORD;
            }

          x = gen_rtx_REG_offset (op, outermode, final_regno, final_offset);

          /* Propagate the original regno.  We don't have any way to specify
             the offset inside the original regno, so do so only for the
             lowpart.  The information is used only by alias analysis, which
             cannot grok partial registers anyway.  */

          if (subreg_lowpart_offset (outermode, innermode) == byte)
            ORIGINAL_REGNO (x) = ORIGINAL_REGNO (op);
          return x;
        }
    }

  /* If we have a SUBREG of a register that we are replacing and we are
     replacing it with a MEM, make a new MEM and try replacing the
     SUBREG with it.  Don't do this if the MEM has a mode-dependent address
     or if we would be widening it.  */

  if (MEM_P (op)
      && ! mode_dependent_address_p (XEXP (op, 0))
      /* Allow splitting of volatile memory references in case we don't
         have an instruction to move the whole thing.  */
      && (! MEM_VOLATILE_P (op)
          || ! have_insn_for (SET, innermode))
      && GET_MODE_SIZE (outermode) <= GET_MODE_SIZE (GET_MODE (op)))
    return adjust_address_nv (op, outermode, byte);

  /* Handle complex values represented as CONCAT
     of real and imaginary parts.  */
  if (GET_CODE (op) == CONCAT)
    {
      unsigned int part_size, final_offset;
      rtx part, res;

      part_size = GET_MODE_UNIT_SIZE (GET_MODE (XEXP (op, 0)));
      if (byte < part_size)
        {
          part = XEXP (op, 0);
          final_offset = byte;
        }
      else
        {
          part = XEXP (op, 1);
          final_offset = byte - part_size;
        }

      if (final_offset + GET_MODE_SIZE (outermode) > part_size)
        return NULL_RTX;

      res = simplify_subreg (outermode, part, GET_MODE (part), final_offset);
      if (res)
        return res;
      if (validate_subreg (outermode, GET_MODE (part), part, final_offset))
        return gen_rtx_SUBREG (outermode, part, final_offset);
      return NULL_RTX;
    }

  /* Optimize SUBREG truncations of zero- and sign-extended values.  */
  if ((GET_CODE (op) == ZERO_EXTEND
       || GET_CODE (op) == SIGN_EXTEND)
      && GET_MODE_BITSIZE (outermode) < GET_MODE_BITSIZE (innermode))
    {
      unsigned int bitpos = subreg_lsb_1 (outermode, innermode, byte);

      /* If we're requesting the lowpart of a zero or sign extension,
         there are three possibilities.  If the outermode is the same
         as the origmode, we can omit both the extension and the subreg.
         If the outermode is not larger than the origmode, we can apply
         the truncation without the extension.  Finally, if the outermode
         is larger than the origmode, but both are integer modes, we
         can just extend to the appropriate mode.  */
      if (bitpos == 0)
        {
          enum machine_mode origmode = GET_MODE (XEXP (op, 0));
          if (outermode == origmode)
            return XEXP (op, 0);
          if (GET_MODE_BITSIZE (outermode) <= GET_MODE_BITSIZE (origmode))
            return simplify_gen_subreg (outermode, XEXP (op, 0), origmode,
                                        subreg_lowpart_offset (outermode,
                                                               origmode));
          if (SCALAR_INT_MODE_P (outermode))
            return simplify_gen_unary (GET_CODE (op), outermode,
                                       XEXP (op, 0), origmode);
        }

      /* A SUBREG resulting from a zero extension may fold to zero if
         it extracts bits higher than the ZERO_EXTEND's source provides.  */
      if (GET_CODE (op) == ZERO_EXTEND
          && bitpos >= GET_MODE_BITSIZE (GET_MODE (XEXP (op, 0))))
        return CONST0_RTX (outermode);
    }
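
  /* For instance, on a target where BYTE 0 is the lowpart offset,

         (subreg:SI (zero_extend:DI (reg:SI X)) 0)  --> (reg:SI X)
         (subreg:HI (zero_extend:DI (reg:SI X)) 0)  --> (subreg:HI (reg:SI X) 0)

     the first because the subreg exactly undoes the extension, the
     second because the truncation can be applied to X directly.  */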

  /* Simplify (subreg:QI (lshiftrt:SI (sign_extend:SI (x:QI)) C), 0)
     into (ashiftrt:QI (x:QI) C), where C is a suitable small constant and
     the outer subreg is effectively a truncation to the original mode.  */
  if ((GET_CODE (op) == LSHIFTRT
       || GET_CODE (op) == ASHIFTRT)
      && SCALAR_INT_MODE_P (outermode)
      /* Ensure that INNERMODE is at least twice as wide as OUTERMODE
         to avoid the possibility that an outer LSHIFTRT shifts by more
         than the sign extension's sign_bit_copies and introduces zeros
         into the high bits of the result.  */
      && (2 * GET_MODE_BITSIZE (outermode)) <= GET_MODE_BITSIZE (innermode)
      && CONST_INT_P (XEXP (op, 1))
      && GET_CODE (XEXP (op, 0)) == SIGN_EXTEND
      && GET_MODE (XEXP (XEXP (op, 0), 0)) == outermode
      && INTVAL (XEXP (op, 1)) < GET_MODE_BITSIZE (outermode)
      && subreg_lsb_1 (outermode, innermode, byte) == 0)
    return simplify_gen_binary (ASHIFTRT, outermode,
                                XEXP (XEXP (op, 0), 0), XEXP (op, 1));

  /* Likewise (subreg:QI (lshiftrt:SI (zero_extend:SI (x:QI)) C), 0)
     into (lshiftrt:QI (x:QI) C), where C is a suitable small constant and
     the outer subreg is effectively a truncation to the original mode.  */
  if ((GET_CODE (op) == LSHIFTRT
       || GET_CODE (op) == ASHIFTRT)
      && SCALAR_INT_MODE_P (outermode)
      && GET_MODE_BITSIZE (outermode) < GET_MODE_BITSIZE (innermode)
      && CONST_INT_P (XEXP (op, 1))
      && GET_CODE (XEXP (op, 0)) == ZERO_EXTEND
      && GET_MODE (XEXP (XEXP (op, 0), 0)) == outermode
      && INTVAL (XEXP (op, 1)) < GET_MODE_BITSIZE (outermode)
      && subreg_lsb_1 (outermode, innermode, byte) == 0)
    return simplify_gen_binary (LSHIFTRT, outermode,
                                XEXP (XEXP (op, 0), 0), XEXP (op, 1));

  /* Likewise (subreg:QI (ashift:SI (zero_extend:SI (x:QI)) C), 0)
     into (ashift:QI (x:QI) C), where C is a suitable small constant and
     the outer subreg is effectively a truncation to the original mode.  */
  if (GET_CODE (op) == ASHIFT
      && SCALAR_INT_MODE_P (outermode)
      && GET_MODE_BITSIZE (outermode) < GET_MODE_BITSIZE (innermode)
      && CONST_INT_P (XEXP (op, 1))
      && (GET_CODE (XEXP (op, 0)) == ZERO_EXTEND
          || GET_CODE (XEXP (op, 0)) == SIGN_EXTEND)
      && GET_MODE (XEXP (XEXP (op, 0), 0)) == outermode
      && INTVAL (XEXP (op, 1)) < GET_MODE_BITSIZE (outermode)
      && subreg_lsb_1 (outermode, innermode, byte) == 0)
    return simplify_gen_binary (ASHIFT, outermode,
                                XEXP (XEXP (op, 0), 0), XEXP (op, 1));

  /* Recognize a word extraction from a multi-word subreg.  */
  if ((GET_CODE (op) == LSHIFTRT
       || GET_CODE (op) == ASHIFTRT)
      && SCALAR_INT_MODE_P (outermode)
      && GET_MODE_BITSIZE (outermode) >= BITS_PER_WORD
      && GET_MODE_BITSIZE (innermode) >= (2 * GET_MODE_BITSIZE (outermode))
      && CONST_INT_P (XEXP (op, 1))
      && (INTVAL (XEXP (op, 1)) & (GET_MODE_BITSIZE (outermode) - 1)) == 0
      && INTVAL (XEXP (op, 1)) >= 0
      && INTVAL (XEXP (op, 1)) < GET_MODE_BITSIZE (innermode)
      && byte == subreg_lowpart_offset (outermode, innermode))
    {
      int shifted_bytes = INTVAL (XEXP (op, 1)) / BITS_PER_UNIT;
      return simplify_gen_subreg (outermode, XEXP (op, 0), innermode,
                                  (WORDS_BIG_ENDIAN
                                   ? byte - shifted_bytes
                                   : byte + shifted_bytes));
    }

  return NULL_RTX;
}
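
/* As a usage sketch, a call such as

       simplify_subreg (QImode, gen_rtx_SUBREG (HImode, x, 0), HImode, 0)

   where X is an SImode pseudo should collapse the nested subreg into
   (subreg:QI (reg:SI X) 0) on a little-endian target, rather than
   leaving a subreg of a subreg.  */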

/* Make a SUBREG operation or equivalent if it folds.  */

rtx
simplify_gen_subreg (enum machine_mode outermode, rtx op,
                     enum machine_mode innermode, unsigned int byte)
{
  rtx newx;

  newx = simplify_subreg (outermode, op, innermode, byte);
  if (newx)
    return newx;

  if (GET_CODE (op) == SUBREG
      || GET_CODE (op) == CONCAT
      || GET_MODE (op) == VOIDmode)
    return NULL_RTX;

  if (validate_subreg (outermode, innermode, op, byte))
    return gen_rtx_SUBREG (outermode, op, byte);

  return NULL_RTX;
}
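
/* Typical use is to request the lowpart of OP in OUTERMODE and accept
   either a folded rtx or a fresh SUBREG, e.g.

       rtx lo = simplify_gen_subreg (SImode, op, DImode,
                                     subreg_lowpart_offset (SImode,
                                                            DImode));

   which folds when OP is a constant and otherwise returns either a
   valid (subreg:SI ...) or NULL_RTX.  */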

/* Simplify X, an rtx expression.

   Return the simplified expression or NULL if no simplifications
   were possible.

   This is the preferred entry point into the simplification routines;
   however, we still allow passes to call the more specific routines.

   Right now GCC has three (yes, three) major bodies of RTL simplification
   code that need to be unified.

        1. fold_rtx in cse.c.  This code uses various CSE specific
           information to aid in RTL simplification.

        2. simplify_rtx in combine.c.  Similar to fold_rtx, except that
           it uses combine specific information to aid in RTL
           simplification.

        3. The routines in this file.


   Long term we want to only have one body of simplification code; to
   get to that state I recommend the following steps:

        1. Pore over fold_rtx & simplify_rtx and move any simplifications
           which do not depend on pass-specific state into these routines.

        2. As code is moved by #1, change fold_rtx & simplify_rtx to
           use this routine whenever possible.

        3. Allow for pass dependent state to be provided to these
           routines and add simplifications based on the pass dependent
           state.  Remove code from cse.c & combine.c that becomes
           redundant/dead.

    It will take time, but ultimately the compiler will be easier to
    maintain and improve.  It's totally silly that when we add a
    simplification it needs to be added in four places (three for RTL
    simplification and one for tree simplification).  */

rtx
simplify_rtx (const_rtx x)
{
  const enum rtx_code code = GET_CODE (x);
  const enum machine_mode mode = GET_MODE (x);

  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      return simplify_unary_operation (code, mode,
                                       XEXP (x, 0), GET_MODE (XEXP (x, 0)));
    case RTX_COMM_ARITH:
      if (swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
        return simplify_gen_binary (code, mode, XEXP (x, 1), XEXP (x, 0));

      /* Fall through....  */

    case RTX_BIN_ARITH:
      return simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1));

    case RTX_TERNARY:
    case RTX_BITFIELD_OPS:
      return simplify_ternary_operation (code, mode, GET_MODE (XEXP (x, 0)),
                                         XEXP (x, 0), XEXP (x, 1),
                                         XEXP (x, 2));

    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      return simplify_relational_operation (code, mode,
                                            ((GET_MODE (XEXP (x, 0))
                                             != VOIDmode)
                                            ? GET_MODE (XEXP (x, 0))
                                            : GET_MODE (XEXP (x, 1))),
                                            XEXP (x, 0),
                                            XEXP (x, 1));

    case RTX_EXTRA:
      if (code == SUBREG)
        return simplify_subreg (mode, SUBREG_REG (x),
                                GET_MODE (SUBREG_REG (x)),
                                SUBREG_BYTE (x));
      break;

    case RTX_OBJ:
      if (code == LO_SUM)
        {
          /* Convert (lo_sum (high FOO) FOO) to FOO.  */
          if (GET_CODE (XEXP (x, 0)) == HIGH
              && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1)))
            return XEXP (x, 1);
        }
      break;

    default:
      break;
    }
  return NULL;
}
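
/* For example, simplify_rtx canonicalizes and folds top-level
   expressions such as

       (plus:SI (const_int 4) (reg:SI X))
         --> (plus:SI (reg:SI X) (const_int 4))
       (lo_sum:SI (high:SI (symbol_ref "a")) (symbol_ref "a"))
         --> (symbol_ref "a")

   returning NULL when no simplification applies.  */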
