gcc-4.2.2/gcc/rtlanal.c (openrisc Subversion repository, trunk/gnu-old, rev 154)
/* Analyze RTL for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007 Free Software
   Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "target.h"
#include "output.h"
#include "tm_p.h"
#include "flags.h"
#include "real.h"
#include "regs.h"
#include "function.h"

/* Forward declarations */
static void set_of_1 (rtx, rtx, void *);
static bool covers_regno_p (rtx, unsigned int);
static bool covers_regno_no_parallel_p (rtx, unsigned int);
static int rtx_referenced_p_1 (rtx *, void *);
static int computed_jump_p_1 (rtx);
static void parms_set (rtx, rtx, void *);

static unsigned HOST_WIDE_INT cached_nonzero_bits (rtx, enum machine_mode,
                                                   rtx, enum machine_mode,
                                                   unsigned HOST_WIDE_INT);
static unsigned HOST_WIDE_INT nonzero_bits1 (rtx, enum machine_mode, rtx,
                                             enum machine_mode,
                                             unsigned HOST_WIDE_INT);
static unsigned int cached_num_sign_bit_copies (rtx, enum machine_mode, rtx,
                                                enum machine_mode,
                                                unsigned int);
static unsigned int num_sign_bit_copies1 (rtx, enum machine_mode, rtx,
                                          enum machine_mode, unsigned int);

/* Offset of the first 'e', 'E' or 'V' operand for each rtx code, or
   -1 if a code has no such operand.  */
static int non_rtx_starting_operands[NUM_RTX_CODE];

/* Bit flags that specify the machine subtype we are compiling for.
   Bits are tested using macros TARGET_... defined in the tm.h file
   and set by `-m...' switches.  Must be defined in rtlanal.c.  */

int target_flags;

/* Truncation narrows the mode from SOURCE mode to DESTINATION mode.
   If TARGET_MODE_REP_EXTENDED (DESTINATION, DESTINATION_REP) is
   SIGN_EXTEND then while narrowing we also have to enforce the
   representation and sign-extend the value to mode DESTINATION_REP.

   If the value is already sign-extended to DESTINATION_REP mode we
   can just switch to DESTINATION mode on it.  For each pair of
   integral modes SOURCE and DESTINATION, when truncating from SOURCE
   to DESTINATION, NUM_SIGN_BIT_COPIES_IN_REP[SOURCE][DESTINATION]
   contains the number of high-order bits in SOURCE that have to be
   copies of the sign-bit so that we can do this mode-switch to
   DESTINATION.  */

static unsigned int
num_sign_bit_copies_in_rep[MAX_MODE_INT + 1][MAX_MODE_INT + 1];
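
/* For example, on a 32-bit target where TARGET_MODE_REP_EXTENDED says
   HImode values are kept sign-extended in SImode registers, an SImode
   value can be switched to HImode once its 17 high-order bits (the 16
   bits above HImode plus the HImode sign bit) are all copies of the
   sign bit, so NUM_SIGN_BIT_COPIES_IN_REP[SImode][HImode] would hold
   17.  (Illustrative numbers only.)  */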

/* Return 1 if the value of X is unstable
   (would be different at a different point in the program).
   The frame pointer, arg pointer, etc. are considered stable
   (within one function) and so is anything marked `unchanging'.  */
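/* For example, a pseudo such as (reg:SI 100) is unstable, while
   frame_pointer_rtx and a MEM_READONLY_P memory reference whose
   address is stable are not.  */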

int
rtx_unstable_p (rtx x)
{
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  switch (code)
    {
    case MEM:
      return !MEM_READONLY_P (x) || rtx_unstable_p (XEXP (x, 0));

    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case REG:
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return 0;
#ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
      /* ??? When call-clobbered, the value is stable modulo the restore
         that must happen after a call.  This currently screws up local-alloc
         into believing that the restore is not needed.  */
      if (x == pic_offset_table_rtx)
        return 0;
#endif
      return 1;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

      /* Fall through.  */

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_unstable_p (XEXP (x, i)))
          return 1;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_unstable_p (XVECEXP (x, i, j)))
            return 1;
      }

  return 0;
}

/* Return 1 if X has a value that can vary even between two
   executions of the program.  0 means X can be compared reliably
   against certain constants or near-constants.
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
   zero, we are slightly more conservative.
   The frame pointer and the arg pointer are considered constant.  */
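/* For example, (mem:SI (reg:SI 100)) varies, while
   (plus:SI frame_pointer_rtx (const_int 8)) does not.  */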

int
rtx_varies_p (rtx x, int for_alias)
{
  RTX_CODE code;
  int i;
  const char *fmt;

  if (!x)
    return 0;

  code = GET_CODE (x);
  switch (code)
    {
    case MEM:
      return !MEM_READONLY_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);

    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case REG:
      /* Note that we have to test for the actual rtx used for the frame
         and arg pointers and not just the register number in case we have
         eliminated the frame and/or arg pointer and are using it
         for pseudos.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return 0;
      if (x == pic_offset_table_rtx
#ifdef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
          /* ??? When call-clobbered, the value is stable modulo the restore
             that must happen after a call.  This currently screws up
             local-alloc into believing that the restore is not needed, so we
             must return 0 only if we are called from alias analysis.  */
          && for_alias
#endif
          )
        return 0;
      return 1;

    case LO_SUM:
      /* The operand 0 of a LO_SUM is considered constant
         (in fact it is related specifically to operand 1)
         during alias analysis.  */
      return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
             || rtx_varies_p (XEXP (x, 1), for_alias);

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

      /* Fall through.  */

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_varies_p (XEXP (x, i), for_alias))
          return 1;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
            return 1;
      }

  return 0;
}

/* Return nonzero if the use of X as an address in a MEM can cause a trap.
   MODE is the mode of the MEM (not that of X) and UNALIGNED_MEMS controls
   whether nonzero is returned for unaligned memory accesses on strict
   alignment machines.  */
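/* For example, (plus:SI stack_pointer_rtx (const_int 4)) cannot trap,
   while a weak SYMBOL_REF or an arbitrary pseudo register can.  With
   UNALIGNED_MEMS set on a STRICT_ALIGNMENT target, an SImode access at
   (plus:SI stack_pointer_rtx (const_int 2)) is also reported as
   trapping, because the offset is not a multiple of the mode size.  */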

static int
rtx_addr_can_trap_p_1 (rtx x, enum machine_mode mode, bool unaligned_mems)
{
  enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case SYMBOL_REF:
      return SYMBOL_REF_WEAK (x);

    case LABEL_REF:
      return 0;

    case REG:
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          || x == stack_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return 0;
      /* All of the virtual frame registers are stack references.  */
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
          && REGNO (x) <= LAST_VIRTUAL_REGISTER)
        return 0;
      return 1;

    case CONST:
      return rtx_addr_can_trap_p_1 (XEXP (x, 0), mode, unaligned_mems);

    case PLUS:
      /* An address is assumed not to trap if:
         - it is an address that can't trap plus a constant integer,
           with the proper remainder modulo the mode size if we are
           considering unaligned memory references.  */
      if (!rtx_addr_can_trap_p_1 (XEXP (x, 0), mode, unaligned_mems)
          && GET_CODE (XEXP (x, 1)) == CONST_INT)
        {
          HOST_WIDE_INT offset;

          if (!STRICT_ALIGNMENT
              || !unaligned_mems
              || GET_MODE_SIZE (mode) == 0)
            return 0;

          offset = INTVAL (XEXP (x, 1));

#ifdef SPARC_STACK_BOUNDARY_HACK
          /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
             the real alignment of %sp.  However, when it does this, the
             alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY.  */
          if (SPARC_STACK_BOUNDARY_HACK
              && (XEXP (x, 0) == stack_pointer_rtx
                  || XEXP (x, 0) == hard_frame_pointer_rtx))
            offset -= STACK_POINTER_OFFSET;
#endif

          return offset % GET_MODE_SIZE (mode) != 0;
        }

      /* - or it is the pic register plus a constant.  */
      if (XEXP (x, 0) == pic_offset_table_rtx && CONSTANT_P (XEXP (x, 1)))
        return 0;

      return 1;

    case LO_SUM:
    case PRE_MODIFY:
      return rtx_addr_can_trap_p_1 (XEXP (x, 1), mode, unaligned_mems);

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      return rtx_addr_can_trap_p_1 (XEXP (x, 0), mode, unaligned_mems);

    default:
      break;
    }

  /* If it isn't one of the cases above, it can cause a trap.  */
  return 1;
}

/* Return nonzero if the use of X as an address in a MEM can cause a trap.  */

int
rtx_addr_can_trap_p (rtx x)
{
  return rtx_addr_can_trap_p_1 (x, VOIDmode, false);
}

/* Return true if X is an address that is known to not be zero.  */
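/* For example, stack_pointer_rtx and a non-weak SYMBOL_REF are known
   to be nonzero, while a weak SYMBOL_REF (whose symbol may be absent)
   or an arbitrary pseudo register is not.  */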

bool
nonzero_address_p (rtx x)
{
  enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case SYMBOL_REF:
      return !SYMBOL_REF_WEAK (x);

    case LABEL_REF:
      return true;

    case REG:
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          || x == stack_pointer_rtx
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return true;
      /* All of the virtual frame registers are stack references.  */
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
          && REGNO (x) <= LAST_VIRTUAL_REGISTER)
        return true;
      return false;

    case CONST:
      return nonzero_address_p (XEXP (x, 0));

    case PLUS:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
        return nonzero_address_p (XEXP (x, 0));
      /* Handle PIC references.  */
      else if (XEXP (x, 0) == pic_offset_table_rtx
               && CONSTANT_P (XEXP (x, 1)))
        return true;
      return false;

    case PRE_MODIFY:
      /* Similar to the above; allow positive offsets.  Further, since
         auto-inc is only allowed in memories, the register must be a
         pointer.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) > 0)
        return true;
      return nonzero_address_p (XEXP (x, 0));

    case PRE_INC:
      /* Similarly.  Further, the offset is always positive.  */
      return true;

    case PRE_DEC:
    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      return nonzero_address_p (XEXP (x, 0));

    case LO_SUM:
      return nonzero_address_p (XEXP (x, 1));

    default:
      break;
    }

  /* If it isn't one of the cases above, it might be zero.  */
  return false;
}

/* Return 1 if X refers to a memory location whose address
   cannot be compared reliably with constant addresses,
   or if X refers to a BLKmode memory object.
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
   zero, we are slightly more conservative.  */

int
rtx_addr_varies_p (rtx x, int for_alias)
{
  enum rtx_code code;
  int i;
  const char *fmt;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == MEM)
    return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_addr_varies_p (XEXP (x, i), for_alias))
          return 1;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
            return 1;
      }
  return 0;
}

/* Return the value of the integer term in X, if one is apparent;
   otherwise return 0.
   Only obvious integer terms are detected.
   This is used in cse.c with the `related_value' field.  */
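/* For example, (const (plus (symbol_ref "x") (const_int 8))) yields 8,
   (const (minus (symbol_ref "x") (const_int 4))) yields -4, and a
   plain (symbol_ref "x") yields 0.  */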

HOST_WIDE_INT
get_integer_term (rtx x)
{
  if (GET_CODE (x) == CONST)
    x = XEXP (x, 0);

  if (GET_CODE (x) == MINUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return - INTVAL (XEXP (x, 1));
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return INTVAL (XEXP (x, 1));
  return 0;
}

/* If X is a constant, return the value sans apparent integer term;
   otherwise return 0.
   Only obvious integer terms are detected.  */
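/* For example, (const (plus (symbol_ref "x") (const_int 8))) yields
   (symbol_ref "x"); anything not wrapped in a CONST yields 0.  */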

rtx
get_related_value (rtx x)
{
  if (GET_CODE (x) != CONST)
    return 0;
  x = XEXP (x, 0);
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return XEXP (x, 0);
  else if (GET_CODE (x) == MINUS
           && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return XEXP (x, 0);
  return 0;
}

/* Return the number of places FIND appears within X.  If COUNT_DEST is
   zero, we do not count occurrences inside the destination of a SET.  */
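/* For example, with X = (set (reg 1) (plus (reg 2) (reg 2))) and
   FIND = (reg 2), the result is 2, assuming the (reg 2) rtx is shared
   as register rtxs usually are; comparison here is by pointer
   equality, except for MEMs, which are compared with rtx_equal_p.  */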

int
count_occurrences (rtx x, rtx find, int count_dest)
{
  int i, j;
  enum rtx_code code;
  const char *format_ptr;
  int count;

  if (x == find)
    return 1;

  code = GET_CODE (x);

  switch (code)
    {
    case REG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
      return 0;

    case MEM:
      if (MEM_P (find) && rtx_equal_p (x, find))
        return 1;
      break;

    case SET:
      if (SET_DEST (x) == find && ! count_dest)
        return count_occurrences (SET_SRC (x), find, count_dest);
      break;

    default:
      break;
    }

  format_ptr = GET_RTX_FORMAT (code);
  count = 0;

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          count += count_occurrences (XEXP (x, i), find, count_dest);
          break;

        case 'E':
          for (j = 0; j < XVECLEN (x, i); j++)
            count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
          break;
        }
    }
  return count;
}

/* Nonzero if register REG appears somewhere within IN.
   Also works if REG is not a register; in this case it checks
   for a subexpression of IN that is Lisp "equal" to REG.  */
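/* For example, reg_mentioned_p returns 1 for (reg:SI 5) within
   (plus:SI (reg:SI 5) (const_int 4)); registers are compared by
   register number, so distinct rtx objects with the same regno still
   match.  */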

int
reg_mentioned_p (rtx reg, rtx in)
{
  const char *fmt;
  int i;
  enum rtx_code code;

  if (in == 0)
    return 0;

  if (reg == in)
    return 1;

  if (GET_CODE (in) == LABEL_REF)
    return reg == XEXP (in, 0);

  code = GET_CODE (in);

  switch (code)
    {
      /* Compare registers by number.  */
    case REG:
      return REG_P (reg) && REGNO (in) == REGNO (reg);

      /* These codes have no constituent expressions
         and are unique.  */
    case SCRATCH:
    case CC0:
    case PC:
      return 0;

    case CONST_INT:
    case CONST_VECTOR:
    case CONST_DOUBLE:
      /* These are kept unique for a given value.  */
      return 0;

    default:
      break;
    }

  if (GET_CODE (reg) == code && rtx_equal_p (reg, in))
    return 1;

  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (in, i) - 1; j >= 0; j--)
            if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
              return 1;
        }
      else if (fmt[i] == 'e'
               && reg_mentioned_p (reg, XEXP (in, i)))
        return 1;
    }
  return 0;
}

/* Return 1 if in between BEG and END, exclusive of BEG and END, there is
   no CODE_LABEL insn.  */

int
no_labels_between_p (rtx beg, rtx end)
{
  rtx p;
  if (beg == end)
    return 0;
  for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
    if (LABEL_P (p))
      return 0;
  return 1;
}

/* Nonzero if register REG is used in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  */

int
reg_used_between_p (rtx reg, rtx from_insn, rtx to_insn)
{
  rtx insn;

  if (from_insn == to_insn)
    return 0;

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
        && (reg_overlap_mentioned_p (reg, PATTERN (insn))
           || (CALL_P (insn) && find_reg_fusage (insn, USE, reg))))
      return 1;
  return 0;
}

/* Nonzero if the old value of X, a register, is referenced in BODY.  If X
   is entirely replaced by a new value and the only use is as a SET_DEST,
   we do not consider it a reference.  */

int
reg_referenced_p (rtx x, rtx body)
{
  int i;

  switch (GET_CODE (body))
    {
    case SET:
      if (reg_overlap_mentioned_p (x, SET_SRC (body)))
        return 1;

      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
         of a REG that occupies all of the REG, the insn references X if
         it is mentioned in the destination.  */
      if (GET_CODE (SET_DEST (body)) != CC0
          && GET_CODE (SET_DEST (body)) != PC
          && !REG_P (SET_DEST (body))
          && ! (GET_CODE (SET_DEST (body)) == SUBREG
                && REG_P (SUBREG_REG (SET_DEST (body)))
                && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
                      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
                         + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
          && reg_overlap_mentioned_p (x, SET_DEST (body)))
        return 1;
      return 0;

    case ASM_OPERANDS:
      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
        if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))
          return 1;
      return 0;

    case CALL:
    case USE:
    case IF_THEN_ELSE:
      return reg_overlap_mentioned_p (x, body);

    case TRAP_IF:
      return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));

    case PREFETCH:
      return reg_overlap_mentioned_p (x, XEXP (body, 0));

    case UNSPEC:
    case UNSPEC_VOLATILE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))
          return 1;
      return 0;

    case PARALLEL:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (reg_referenced_p (x, XVECEXP (body, 0, i)))
          return 1;
      return 0;

    case CLOBBER:
      if (MEM_P (XEXP (body, 0)))
        if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
          return 1;
      return 0;

    case COND_EXEC:
      if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
        return 1;
      return reg_referenced_p (x, COND_EXEC_CODE (body));

    default:
      return 0;
    }
}

/* Nonzero if register REG is set or clobbered in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  */

int
reg_set_between_p (rtx reg, rtx from_insn, rtx to_insn)
{
  rtx insn;

  if (from_insn == to_insn)
    return 0;

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && reg_set_p (reg, insn))
      return 1;
  return 0;
}

/* Internals of reg_set_between_p.  */
int
reg_set_p (rtx reg, rtx insn)
{
  /* We can be passed an insn or part of one.  If we are passed an insn,
     check if a side-effect of the insn clobbers REG.  */
  if (INSN_P (insn)
      && (FIND_REG_INC_NOTE (insn, reg)
          || (CALL_P (insn)
              && ((REG_P (reg)
                   && REGNO (reg) < FIRST_PSEUDO_REGISTER
                   && TEST_HARD_REG_BIT (regs_invalidated_by_call,
                                         REGNO (reg)))
                  || MEM_P (reg)
                  || find_reg_fusage (insn, CLOBBER, reg)))))
    return 1;

  return set_of (reg, insn) != NULL_RTX;
}

/* Similar to reg_set_between_p, but check all registers in X.  Return 0
   only if none of them are modified between START and END.  Return 1 if
   X contains a MEM; this routine does use memory aliasing.  */

int
modified_between_p (rtx x, rtx start, rtx end)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, j;
  rtx insn;

  if (start == end)
    return 0;

  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case PC:
    case CC0:
      return 1;

    case MEM:
      if (modified_between_p (XEXP (x, 0), start, end))
        return 1;
      if (MEM_READONLY_P (x))
        return 0;
      for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
        if (memory_modified_in_insn_p (x, insn))
          return 1;
      return 0;
      break;

    case REG:
      return reg_set_between_p (x, start, end);

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
        return 1;

      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (modified_between_p (XVECEXP (x, i, j), start, end))
            return 1;
    }

  return 0;
}

/* Similar to reg_set_p, but check all registers in X.  Return 0 only if none
   of them are modified in INSN.  Return 1 if X contains a MEM; this routine
   does use memory aliasing.  */

int
modified_in_p (rtx x, rtx insn)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, j;

  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case PC:
    case CC0:
      return 1;

    case MEM:
      if (modified_in_p (XEXP (x, 0), insn))
        return 1;
      if (MEM_READONLY_P (x))
        return 0;
      if (memory_modified_in_insn_p (x, insn))
        return 1;
      return 0;
      break;

    case REG:
      return reg_set_p (x, insn);

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
        return 1;

      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (modified_in_p (XVECEXP (x, i, j), insn))
            return 1;
    }

  return 0;
}

/* Helper function for set_of.  */
struct set_of_data
  {
    rtx found;
    rtx pat;
  };

static void
set_of_1 (rtx x, rtx pat, void *data1)
{
   struct set_of_data *data = (struct set_of_data *) (data1);
   if (rtx_equal_p (x, data->pat)
       || (!MEM_P (x) && reg_overlap_mentioned_p (data->pat, x)))
     data->found = pat;
}

/* Given an INSN, return a SET or CLOBBER expression that does modify PAT
   (either directly or via STRICT_LOW_PART and similar modifiers).  */
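/* For example, if INSN's pattern is
   (parallel [(set (reg 1) (reg 2)) (clobber (reg 3))]),
   then set_of ((reg 3), insn) returns the CLOBBER and
   set_of ((reg 4), insn) returns NULL_RTX.  */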
rtx
set_of (rtx pat, rtx insn)
{
  struct set_of_data data;
  data.found = NULL_RTX;
  data.pat = pat;
  note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
  return data.found;
}

/* Given an INSN, return a SET expression if this insn has only a single SET.
   It may also have CLOBBERs, USEs, or SETs whose output
   will not be used, which we ignore.  */
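/* For example, for an insn whose pattern is
   (parallel [(set (reg 1) (reg 2)) (clobber (reg 3))])
   this returns the SET; a pattern with two sets whose destinations
   are both live yields NULL_RTX instead.  */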

rtx
single_set_2 (rtx insn, rtx pat)
{
  rtx set = NULL;
  int set_verified = 1;
  int i;

  if (GET_CODE (pat) == PARALLEL)
    {
      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx sub = XVECEXP (pat, 0, i);
          switch (GET_CODE (sub))
            {
            case USE:
            case CLOBBER:
              break;

            case SET:
              /* We can consider insns having multiple sets, where all
                 but one are dead, as single set insns.  In the common
                 case only a single set is present in the pattern, so we
                 want to avoid checking for REG_UNUSED notes unless
                 necessary.

                 When we reach a set the first time, we just expect it to
                 be the single set we are looking for; only when more
                 sets are found in the insn do we check them.  */
              if (!set_verified)
                {
                  if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
                      && !side_effects_p (set))
                    set = NULL;
                  else
                    set_verified = 1;
                }
              if (!set)
                set = sub, set_verified = 0;
              else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
                       || side_effects_p (sub))
                return NULL_RTX;
              break;

            default:
              return NULL_RTX;
            }
        }
    }
  return set;
}

/* Given an INSN, return nonzero if it has more than one SET, else return
   zero.  */

int
multiple_sets (rtx insn)
{
  int found;
  int i;

  /* INSN must be an insn.  */
  if (! INSN_P (insn))
    return 0;

  /* Only a PARALLEL can have multiple SETs.  */
  if (GET_CODE (PATTERN (insn)) == PARALLEL)
    {
      for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
        if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
          {
            /* If we have already found a SET, then return now.  */
            if (found)
              return 1;
            else
              found = 1;
          }
    }

  /* Either zero or one SET.  */
  return 0;
}

/* Return nonzero if the destination of SET equals the source
   and there are no side effects.  */
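/* For example, (set (reg:SI 3) (reg:SI 3)) is a no-op, as is a move
   between two SUBREGs of the same register with equal SUBREG_BYTE.  */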

int
set_noop_p (rtx set)
{
  rtx src = SET_SRC (set);
  rtx dst = SET_DEST (set);

  if (dst == pc_rtx && src == pc_rtx)
    return 1;

  if (MEM_P (dst) && MEM_P (src))
    return rtx_equal_p (dst, src) && !side_effects_p (dst);

  if (GET_CODE (dst) == ZERO_EXTRACT)
    return rtx_equal_p (XEXP (dst, 0), src)
           && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx
           && !side_effects_p (src);

  if (GET_CODE (dst) == STRICT_LOW_PART)
    dst = XEXP (dst, 0);

  if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
    {
      if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))
        return 0;
      src = SUBREG_REG (src);
      dst = SUBREG_REG (dst);
    }

  return (REG_P (src) && REG_P (dst)
          && REGNO (src) == REGNO (dst));
}

/* Return nonzero if an insn consists only of SETs, each of which only sets a
   value to itself.  */

int
noop_move_p (rtx insn)
{
  rtx pat = PATTERN (insn);

  if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)
    return 1;

  /* Insns carrying these notes are useful later on.  */
  if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
    return 0;

  /* For now treat an insn with a REG_RETVAL note as a
     special insn which should not be considered a no-op.  */
  if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
    return 0;

  if (GET_CODE (pat) == SET && set_noop_p (pat))
    return 1;

  if (GET_CODE (pat) == PARALLEL)
    {
      int i;
      /* If nothing but SETs of registers to themselves,
         this insn can also be deleted.  */
      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx tem = XVECEXP (pat, 0, i);

          if (GET_CODE (tem) == USE
              || GET_CODE (tem) == CLOBBER)
            continue;

          if (GET_CODE (tem) != SET || ! set_noop_p (tem))
            return 0;
        }

      return 1;
    }
  return 0;
}


/* Return the last thing that X was assigned from before *PINSN.  If VALID_TO
   is not NULL_RTX then verify that the object is not modified up to VALID_TO.
   If the object was modified, if we hit a partial assignment to X, or hit a
   CODE_LABEL first, return X.  If we found an assignment, update *PINSN to
   point to it.  ALLOW_HWREG is set to 1 if hardware registers are allowed to
   be the src.  */

rtx
find_last_value (rtx x, rtx *pinsn, rtx valid_to, int allow_hwreg)
{
  rtx p;

  for (p = PREV_INSN (*pinsn); p && !LABEL_P (p);
       p = PREV_INSN (p))
    if (INSN_P (p))
      {
        rtx set = single_set (p);
        rtx note = find_reg_note (p, REG_EQUAL, NULL_RTX);

        if (set && rtx_equal_p (x, SET_DEST (set)))
          {
            rtx src = SET_SRC (set);

            if (note && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
              src = XEXP (note, 0);

            if ((valid_to == NULL_RTX
                 || ! modified_between_p (src, PREV_INSN (p), valid_to))
                /* Reject hard registers because we don't usually want
                   to use them; we'd rather use a pseudo.  */
                && (! (REG_P (src)
                      && REGNO (src) < FIRST_PSEUDO_REGISTER) || allow_hwreg))
              {
                *pinsn = p;
                return src;
              }
          }

        /* If set in non-simple way, we don't have a value.  */
        if (reg_set_p (x, p))
          break;
      }

  return x;
}

/* Return nonzero if register in range [REGNO, ENDREGNO)
   appears either explicitly or implicitly in X
   other than being stored into.

   References contained within the substructure at LOC do not count.
   LOC may be zero, meaning don't ignore anything.  */

int
refers_to_regno_p (unsigned int regno, unsigned int endregno, rtx x,
                   rtx *loc)
{
  int i;
  unsigned int x_regno;
  RTX_CODE code;
  const char *fmt;

 repeat:
  /* The contents of a REG_NONNEG note are always zero, so we must come here
     upon repeat in case the last REG_NOTE is a REG_NONNEG note.  */
  if (x == 0)
    return 0;

  code = GET_CODE (x);

  switch (code)
    {
    case REG:
      x_regno = REGNO (x);

      /* If we are modifying the stack, frame, or argument pointer, it will
         clobber a virtual register.  In fact, we could be more precise,
         but it isn't worth it.  */
      if ((x_regno == STACK_POINTER_REGNUM
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
           || x_regno == ARG_POINTER_REGNUM
#endif
           || x_regno == FRAME_POINTER_REGNUM)
          && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)
        return 1;

      return (endregno > x_regno
              && regno < x_regno + (x_regno < FIRST_PSEUDO_REGISTER
                                    ? hard_regno_nregs[x_regno][GET_MODE (x)]
                                    : 1));

    case SUBREG:
      /* If this is a SUBREG of a hard reg, we can see exactly which
         registers are being modified.  Otherwise, handle normally.  */
      if (REG_P (SUBREG_REG (x))
          && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
        {
          unsigned int inner_regno = subreg_regno (x);
          unsigned int inner_endregno
            = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
                             ? hard_regno_nregs[inner_regno][GET_MODE (x)] : 1);

          return endregno > inner_regno && regno < inner_endregno;
        }
      break;

    case CLOBBER:
    case SET:
      if (&SET_DEST (x) != loc
          /* Note setting a SUBREG counts as referring to the REG it is in for
             a pseudo but not for hard registers since we can
             treat each word individually.  */
          && ((GET_CODE (SET_DEST (x)) == SUBREG
               && loc != &SUBREG_REG (SET_DEST (x))
               && REG_P (SUBREG_REG (SET_DEST (x)))
               && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
               && refers_to_regno_p (regno, endregno,
                                     SUBREG_REG (SET_DEST (x)), loc))
              || (!REG_P (SET_DEST (x))
                  && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))
        return 1;

      if (code == CLOBBER || loc == &SET_SRC (x))
        return 0;
      x = SET_SRC (x);
      goto repeat;

    default:
      break;
    }

  /* X does not match, so try its subexpressions.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && loc != &XEXP (x, i))
        {
          if (i == 0)
            {
              x = XEXP (x, 0);
              goto repeat;
            }
          else
            if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
              return 1;
        }
      else if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (loc != &XVECEXP (x, i, j)
                && refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
              return 1;
        }
    }
  return 0;
}

/* Nonzero if modifying X will affect IN.  If X is a register or a SUBREG,
   we check if any register number in X conflicts with the relevant register
   numbers.  If X is a constant, return 0.  If X is a MEM, return 1 iff IN
   contains a MEM (we don't bother checking for memory addresses that can't
   conflict because we expect this to be a rare case).  */
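/* For example, on a target where DImode occupies two hard registers,
   modifying (reg:SI 1) affects a use of (reg:DI 0), so this returns 1
   for that pair even though the register numbers differ.  */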

int
reg_overlap_mentioned_p (rtx x, rtx in)
{
  unsigned int regno, endregno;

  /* If either argument is a constant, then modifying X can not
     affect IN.  Here we look at IN; we could profitably combine
     CONSTANT_P (x) with the switch statement below.  */
  if (CONSTANT_P (in))
    return 0;

 recurse:
  switch (GET_CODE (x))
    {
    case STRICT_LOW_PART:
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* Overly conservative.  */
      x = XEXP (x, 0);
      goto recurse;

    case SUBREG:
      regno = REGNO (SUBREG_REG (x));
      if (regno < FIRST_PSEUDO_REGISTER)
        regno = subreg_regno (x);
      goto do_reg;

    case REG:
      regno = REGNO (x);
    do_reg:
      endregno = regno + (regno < FIRST_PSEUDO_REGISTER
                          ? hard_regno_nregs[regno][GET_MODE (x)] : 1);
      return refers_to_regno_p (regno, endregno, in, (rtx*) 0);

    case MEM:
      {
        const char *fmt;
        int i;

        if (MEM_P (in))
          return 1;

        fmt = GET_RTX_FORMAT (GET_CODE (in));
        for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
          if (fmt[i] == 'e')
            {
              if (reg_overlap_mentioned_p (x, XEXP (in, i)))
                return 1;
            }
          else if (fmt[i] == 'E')
            {
              int j;
              for (j = XVECLEN (in, i) - 1; j >= 0; --j)
                if (reg_overlap_mentioned_p (x, XVECEXP (in, i, j)))
                  return 1;
            }

        return 0;
      }

    case SCRATCH:
    case PC:
    case CC0:
      return reg_mentioned_p (x, in);

    case PARALLEL:
      {
        int i;

        /* If any register in here refers to it, we return true.  */
        for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
          if (XEXP (XVECEXP (x, 0, i), 0) != 0
              && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))
            return 1;
        return 0;
      }

    default:
      gcc_assert (CONSTANT_P (x));
      return 0;
    }
}

/* Call FUN on each register or MEM that is stored into or clobbered by X.
   (X would be the pattern of an insn).
   FUN receives two arguments:
     the REG, MEM, CC0 or PC being stored in or clobbered,
     the SET or CLOBBER rtx that does the store.

  If the item being stored in or clobbered is a SUBREG of a hard register,
  the SUBREG will be passed.  */
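/* A typical use, sketched here with a hypothetical callback, counts
   the hard registers stored into by an insn:

     static void
     count_hard_reg_sets (rtx dest, rtx set ATTRIBUTE_UNUSED, void *data)
     {
       if (REG_P (dest) && REGNO (dest) < FIRST_PSEUDO_REGISTER)
         (*(int *) data)++;
     }

     int n = 0;
     note_stores (PATTERN (insn), count_hard_reg_sets, &n);  */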

void
note_stores (rtx x, void (*fun) (rtx, rtx, void *), void *data)
{
  int i;

  if (GET_CODE (x) == COND_EXEC)
    x = COND_EXEC_CODE (x);

  if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
    {
      rtx dest = SET_DEST (x);

      while ((GET_CODE (dest) == SUBREG
              && (!REG_P (SUBREG_REG (dest))
                  || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
             || GET_CODE (dest) == ZERO_EXTRACT
             || GET_CODE (dest) == STRICT_LOW_PART)
        dest = XEXP (dest, 0);

      /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
         each of whose first operand is a register.  */
      if (GET_CODE (dest) == PARALLEL)
        {
          for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
            if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
              (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data);
        }
      else
        (*fun) (dest, x, data);
    }

  else if (GET_CODE (x) == PARALLEL)
    for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
      note_stores (XVECEXP (x, 0, i), fun, data);
}

/* Like note_stores, but call FUN for each expression that is being
   referenced in PBODY, a pointer to the PATTERN of an insn.  We only call
   FUN for each expression, not any interior subexpressions.  FUN receives a
   pointer to the expression and the DATA passed to this function.

   Note that this is not quite the same test as that done in reg_referenced_p
   since that considers something as being referenced if it is being
   partially set, while we do not.  */

void
note_uses (rtx *pbody, void (*fun) (rtx *, void *), void *data)
{
  rtx body = *pbody;
  int i;

  switch (GET_CODE (body))
    {
    case COND_EXEC:
      (*fun) (&COND_EXEC_TEST (body), data);
      note_uses (&COND_EXEC_CODE (body), fun, data);
      return;

    case PARALLEL:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        note_uses (&XVECEXP (body, 0, i), fun, data);
      return;

    case USE:
      (*fun) (&XEXP (body, 0), data);
      return;

    case ASM_OPERANDS:
      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
        (*fun) (&ASM_OPERANDS_INPUT (body, i), data);
      return;

    case TRAP_IF:
      (*fun) (&TRAP_CONDITION (body), data);
      return;

    case PREFETCH:
      (*fun) (&XEXP (body, 0), data);
      return;

    case UNSPEC:
    case UNSPEC_VOLATILE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        (*fun) (&XVECEXP (body, 0, i), data);
      return;

    case CLOBBER:
      if (MEM_P (XEXP (body, 0)))
        (*fun) (&XEXP (XEXP (body, 0), 0), data);
      return;

    case SET:
      {
        rtx dest = SET_DEST (body);

        /* For sets we visit everything in the source, plus registers in
           the memory expression of the store and the operands of a
           ZERO_EXTRACT.  */
        (*fun) (&SET_SRC (body), data);

        if (GET_CODE (dest) == ZERO_EXTRACT)
          {
            (*fun) (&XEXP (dest, 1), data);
            (*fun) (&XEXP (dest, 2), data);
          }

        while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
          dest = XEXP (dest, 0);

        if (MEM_P (dest))
          (*fun) (&XEXP (dest, 0), data);
      }
      return;

    default:
      /* All the other possibilities never store.  */
      (*fun) (pbody, data);
      return;
    }
}

/* Return nonzero if X's old contents don't survive after INSN.
   This will be true if X is (cc0) or if X is a register and
   X dies in INSN or INSN entirely sets X.

   "Entirely set" means set directly and not through a SUBREG or
   ZERO_EXTRACT, so no trace of the old contents remains.
   Likewise, REG_INC does not count.

   REG may be a hard or pseudo reg.  Renumbering is not taken into account,
   but for this use that makes no difference, since regs don't overlap
   during their lifetimes.  Therefore, this function may be used
   at any time after deaths have been computed (in flow.c).

   If REG is a hard reg that occupies multiple machine registers, this
   function will only return 1 if each of those registers will be replaced
   by INSN.  */

int
dead_or_set_p (rtx insn, rtx x)
{
  unsigned int regno, last_regno;
  unsigned int i;

  /* Can't use cc0_rtx below since this file is used by genattrtab.c.  */
  if (GET_CODE (x) == CC0)
    return 1;

  gcc_assert (REG_P (x));

  regno = REGNO (x);
  last_regno = (regno >= FIRST_PSEUDO_REGISTER ? regno
                : regno + hard_regno_nregs[regno][GET_MODE (x)] - 1);

  for (i = regno; i <= last_regno; i++)
    if (! dead_or_set_regno_p (insn, i))
      return 0;

  return 1;
}

/* Return TRUE iff DEST is a register or subreg of a register and
   doesn't change the number of words of the inner register, and any
   part of the register is TEST_REGNO.  */

static bool
covers_regno_no_parallel_p (rtx dest, unsigned int test_regno)
{
  unsigned int regno, endregno;

  if (GET_CODE (dest) == SUBREG
      && (((GET_MODE_SIZE (GET_MODE (dest))
            + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
          == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
               + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
    dest = SUBREG_REG (dest);

  if (!REG_P (dest))
    return false;

  regno = REGNO (dest);
  endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
              : regno + hard_regno_nregs[regno][GET_MODE (dest)]);
  return (test_regno >= regno && test_regno < endregno);
}

/* Like covers_regno_no_parallel_p, but also handles PARALLELs where
   any member matches the covers_regno_no_parallel_p criteria.  */

static bool
covers_regno_p (rtx dest, unsigned int test_regno)
{
  if (GET_CODE (dest) == PARALLEL)
    {
      /* Some targets place small structures in registers for return
         values of functions, and those registers are wrapped in
         PARALLELs that we may see as the destination of a SET.  */
      int i;

      for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
        {
          rtx inner = XEXP (XVECEXP (dest, 0, i), 0);
          if (inner != NULL_RTX
              && covers_regno_no_parallel_p (inner, test_regno))
            return true;
        }

      return false;
    }
  else
    return covers_regno_no_parallel_p (dest, test_regno);
}

/* Utility function for dead_or_set_p to check an individual register.  Also
   called from flow.c.  */

int
dead_or_set_regno_p (rtx insn, unsigned int test_regno)
{
  rtx pattern;

  /* See if there is a death note for something that includes TEST_REGNO.  */
  if (find_regno_note (insn, REG_DEAD, test_regno))
    return 1;

  if (CALL_P (insn)
      && find_regno_fusage (insn, CLOBBER, test_regno))
    return 1;

  pattern = PATTERN (insn);

  if (GET_CODE (pattern) == COND_EXEC)
    pattern = COND_EXEC_CODE (pattern);

  if (GET_CODE (pattern) == SET)
    return covers_regno_p (SET_DEST (pattern), test_regno);
  else if (GET_CODE (pattern) == PARALLEL)
    {
      int i;

      for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
        {
          rtx body = XVECEXP (pattern, 0, i);

          if (GET_CODE (body) == COND_EXEC)
            body = COND_EXEC_CODE (body);

          if ((GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
              && covers_regno_p (SET_DEST (body), test_regno))
            return 1;
        }
    }

  return 0;
}

/* Return the reg-note of kind KIND in insn INSN, if there is one.
   If DATUM is nonzero, look for one whose datum is DATUM.  */
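/* For example, find_reg_note (insn, REG_EQUAL, NULL_RTX) returns
   INSN's REG_EQUAL note if it has one, and 0 otherwise.  */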

rtx
find_reg_note (rtx insn, enum reg_note kind, rtx datum)
{
  rtx link;

  gcc_assert (insn);

  /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
  if (! INSN_P (insn))
    return 0;
  if (datum == 0)
    {
      for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
        if (REG_NOTE_KIND (link) == kind)
          return link;
      return 0;
    }

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == kind && datum == XEXP (link, 0))
      return link;
  return 0;
}

/* Return the reg-note of kind KIND in insn INSN which applies to register
   number REGNO, if any.  Return 0 if there is no such reg-note.  Note that
   the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
   it might be the case that the note overlaps REGNO.  */

rtx
find_regno_note (rtx insn, enum reg_note kind, unsigned int regno)
{
  rtx link;

  /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
  if (! INSN_P (insn))
    return 0;

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == kind
        /* Verify that it is a register, so that scratch and MEM won't cause a
           problem here.  */
        && REG_P (XEXP (link, 0))
        && REGNO (XEXP (link, 0)) <= regno
        && ((REGNO (XEXP (link, 0))
             + (REGNO (XEXP (link, 0)) >= FIRST_PSEUDO_REGISTER ? 1
                : hard_regno_nregs[REGNO (XEXP (link, 0))]
                                  [GET_MODE (XEXP (link, 0))]))
            > regno))
      return link;
  return 0;
}

/* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
   has such a note.  */

rtx
find_reg_equal_equiv_note (rtx insn)
{
  rtx link;

  if (!INSN_P (insn))
    return 0;
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_EQUAL
        || REG_NOTE_KIND (link) == REG_EQUIV)
      {
        if (single_set (insn) == 0)
          return 0;
        return link;
      }
  return NULL;
}

/* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
   in the CALL_INSN_FUNCTION_USAGE information of INSN.  */

int
find_reg_fusage (rtx insn, enum rtx_code code, rtx datum)
{
  /* If it's not a CALL_INSN, it can't possibly have a
     CALL_INSN_FUNCTION_USAGE field, so don't bother checking.  */
  if (!CALL_P (insn))
    return 0;

  gcc_assert (datum);

  if (!REG_P (datum))
    {
      rtx link;

      for (link = CALL_INSN_FUNCTION_USAGE (insn);
           link;
           link = XEXP (link, 1))
        if (GET_CODE (XEXP (link, 0)) == code
            && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))
          return 1;
    }
  else
    {
      unsigned int regno = REGNO (datum);

      /* CALL_INSN_FUNCTION_USAGE information cannot contain references
         to pseudo registers, so don't bother checking.  */

      if (regno < FIRST_PSEUDO_REGISTER)
        {
          unsigned int end_regno
            = regno + hard_regno_nregs[regno][GET_MODE (datum)];
          unsigned int i;

          for (i = regno; i < end_regno; i++)
            if (find_regno_fusage (insn, code, i))
              return 1;
        }
    }

  return 0;
}

/* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
   in the CALL_INSN_FUNCTION_USAGE information of INSN.  */

int
find_regno_fusage (rtx insn, enum rtx_code code, unsigned int regno)
{
  rtx link;

  /* CALL_INSN_FUNCTION_USAGE information cannot contain references
     to pseudo registers, so don't bother checking.  */

  if (regno >= FIRST_PSEUDO_REGISTER
      || !CALL_P (insn))
    return 0;

  for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
    {
      unsigned int regnote;
      rtx op, reg;

      if (GET_CODE (op = XEXP (link, 0)) == code
          && REG_P (reg = XEXP (op, 0))
          && (regnote = REGNO (reg)) <= regno
          && regnote + hard_regno_nregs[regnote][GET_MODE (reg)] > regno)
        return 1;
    }

  return 0;
}

/* Return true if INSN is a call to a pure function.  */
1738
 
1739
int
1740
pure_call_p (rtx insn)
1741
{
1742
  rtx link;
1743
 
1744
  if (!CALL_P (insn) || ! CONST_OR_PURE_CALL_P (insn))
1745
    return 0;
1746
 
1747
  /* Look for the note that differentiates const and pure functions.  */
1748
  for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
1749
    {
1750
      rtx u, m;
1751
 
1752
      if (GET_CODE (u = XEXP (link, 0)) == USE
1753
          && MEM_P (m = XEXP (u, 0)) && GET_MODE (m) == BLKmode
1754
          && GET_CODE (XEXP (m, 0)) == SCRATCH)
1755
        return 1;
1756
    }
1757
 
1758
  return 0;
1759
}
1760
 
1761
/* Remove register note NOTE from the REG_NOTES of INSN.  */
1762
 
1763
void
1764
remove_note (rtx insn, rtx note)
1765
{
1766
  rtx link;
1767
 
1768
  if (note == NULL_RTX)
1769
    return;
1770
 
1771
  if (REG_NOTES (insn) == note)
1772
    {
1773
      REG_NOTES (insn) = XEXP (note, 1);
1774
      return;
1775
    }
1776
 
1777
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1778
    if (XEXP (link, 1) == note)
1779
      {
1780
        XEXP (link, 1) = XEXP (note, 1);
1781
        return;
1782
      }
1783
 
1784
  gcc_unreachable ();
1785
}
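
/* Illustrative sketch (not part of the original file): the usual idiom
   for dropping a REG_EQUAL note is to look it up with find_reg_note and
   hand the result to remove_note; remove_note simply returns when given
   NULL_RTX.  */
#if 0
  remove_note (insn, find_reg_note (insn, REG_EQUAL, NULL_RTX));
#endif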

/* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
   return 1 if it is found.  A simple equality test is used to determine if
   NODE matches.  */

int
in_expr_list_p (rtx listp, rtx node)
{
  rtx x;

  for (x = listp; x; x = XEXP (x, 1))
    if (node == XEXP (x, 0))
      return 1;

  return 0;
}

/* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
   remove that entry from the list if it is found.

   A simple equality test is used to determine if NODE matches.  */

void
remove_node_from_expr_list (rtx node, rtx *listp)
{
  rtx temp = *listp;
  rtx prev = NULL_RTX;

  while (temp)
    {
      if (node == XEXP (temp, 0))
        {
          /* Splice the node out of the list.  */
          if (prev)
            XEXP (prev, 1) = XEXP (temp, 1);
          else
            *listp = XEXP (temp, 1);

          return;
        }

      prev = temp;
      temp = XEXP (temp, 1);
    }
}

/* Nonzero if X contains any volatile instructions.  These are instructions
   which may cause unpredictable changes to the machine state, and thus no
   instructions should be moved or combined across them.  This includes
   only volatile asms and UNSPEC_VOLATILE instructions.  */

int
volatile_insn_p (rtx x)
{
  RTX_CODE code;

  code = GET_CODE (x);
  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CC0:
    case PC:
    case REG:
    case SCRATCH:
    case CLOBBER:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case CALL:
    case MEM:
      return 0;

    case UNSPEC_VOLATILE:
 /* case TRAP_IF: This isn't clear yet.  */
      return 1;

    case ASM_INPUT:
    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    const char *fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          {
            if (volatile_insn_p (XEXP (x, i)))
              return 1;
          }
        else if (fmt[i] == 'E')
          {
            int j;
            for (j = 0; j < XVECLEN (x, i); j++)
              if (volatile_insn_p (XVECEXP (x, i, j)))
                return 1;
          }
      }
  }
  return 0;
}

/* Nonzero if X contains any volatile memory references,
   UNSPEC_VOLATILE operations or volatile ASM_OPERANDS expressions.  */

int
volatile_refs_p (rtx x)
{
  RTX_CODE code;

  code = GET_CODE (x);
  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CC0:
    case PC:
    case REG:
    case SCRATCH:
    case CLOBBER:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 0;

    case UNSPEC_VOLATILE:
      return 1;

    case MEM:
    case ASM_INPUT:
    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    const char *fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          {
            if (volatile_refs_p (XEXP (x, i)))
              return 1;
          }
        else if (fmt[i] == 'E')
          {
            int j;
            for (j = 0; j < XVECLEN (x, i); j++)
              if (volatile_refs_p (XVECEXP (x, i, j)))
                return 1;
          }
      }
  }
  return 0;
}

/* Similar to above, except that it also rejects register pre- and post-
   incrementing.  */

int
side_effects_p (rtx x)
{
  RTX_CODE code;

  code = GET_CODE (x);
  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CC0:
    case PC:
    case REG:
    case SCRATCH:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 0;

    case CLOBBER:
      /* Reject CLOBBER with a non-VOID mode.  These are made by combine.c
         when some combination can't be done.  If we see one, don't think
         that we can simplify the expression.  */
      return (GET_MODE (x) != VOIDmode);

    case PRE_INC:
    case PRE_DEC:
    case POST_INC:
    case POST_DEC:
    case PRE_MODIFY:
    case POST_MODIFY:
    case CALL:
    case UNSPEC_VOLATILE:
 /* case TRAP_IF: This isn't clear yet.  */
      return 1;

    case MEM:
    case ASM_INPUT:
    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    const char *fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          {
            if (side_effects_p (XEXP (x, i)))
              return 1;
          }
        else if (fmt[i] == 'E')
          {
            int j;
            for (j = 0; j < XVECLEN (x, i); j++)
              if (side_effects_p (XVECEXP (x, i, j)))
                return 1;
          }
      }
  }
  return 0;
}
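
/* Illustrative sketch (not part of the original file): a typical guard a
   dead-code pass might use before deleting a SET whose destination is
   unused; the helper name is hypothetical.  volatile_refs_p is the
   stricter test when volatile reads matter, volatile_insn_p the
   loosest.  */
#if 0
static int
dead_set_removable_p (rtx set)
{
  return (!side_effects_p (SET_SRC (set))
          && !side_effects_p (SET_DEST (set)));
}
#endif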

enum may_trap_p_flags
{
  MTP_UNALIGNED_MEMS = 1,
  MTP_AFTER_MOVE = 2
};
/* Return nonzero if evaluating rtx X might cause a trap.
   (FLAGS & MTP_UNALIGNED_MEMS) controls whether nonzero is returned for
   unaligned memory accesses on strict alignment machines.  If
   (FLAGS & MTP_AFTER_MOVE) is true, return nonzero even if the expression
   cannot trap at its current location, but might become trapping if moved
   elsewhere.  */

static int
may_trap_p_1 (rtx x, unsigned flags)
{
  int i;
  enum rtx_code code;
  const char *fmt;
  bool unaligned_mems = (flags & MTP_UNALIGNED_MEMS) != 0;

  if (x == 0)
    return 0;
  code = GET_CODE (x);
  switch (code)
    {
      /* Handle these cases quickly.  */
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case CONST:
    case PC:
    case CC0:
    case REG:
    case SCRATCH:
      return 0;

    case ASM_INPUT:
    case UNSPEC_VOLATILE:
    case TRAP_IF:
      return 1;

    case ASM_OPERANDS:
      return MEM_VOLATILE_P (x);

      /* Memory ref can trap unless it's a static var or a stack slot.  */
    case MEM:
      if (/* MEM_NOTRAP_P only relates to the actual position of the memory
             reference; moving it out of a condition might cause its address
             to become invalid.  */
          !(flags & MTP_AFTER_MOVE)
          && MEM_NOTRAP_P (x)
          && (!STRICT_ALIGNMENT || !unaligned_mems))
        return 0;
      return
        rtx_addr_can_trap_p_1 (XEXP (x, 0), GET_MODE (x), unaligned_mems);

      /* Division by a non-constant might trap.  */
    case DIV:
    case MOD:
    case UDIV:
    case UMOD:
      if (HONOR_SNANS (GET_MODE (x)))
        return 1;
      if (SCALAR_FLOAT_MODE_P (GET_MODE (x)))
        return flag_trapping_math;
      if (!CONSTANT_P (XEXP (x, 1)) || (XEXP (x, 1) == const0_rtx))
        return 1;
      break;

    case EXPR_LIST:
      /* An EXPR_LIST is used to represent a function call.  This
         certainly may trap.  */
      return 1;

    case GE:
    case GT:
    case LE:
    case LT:
    case LTGT:
    case COMPARE:
      /* Some floating point comparisons may trap.  */
      if (!flag_trapping_math)
        break;
      /* ??? There is no machine independent way to check for tests that trap
         when COMPARE is used, though many targets do make this distinction.
         For instance, sparc uses CCFPE for compares which generate exceptions
         and CCFP for compares which do not generate exceptions.  */
      if (HONOR_NANS (GET_MODE (x)))
        return 1;
      /* But often the compare has some CC mode, so check operand
         modes as well.  */
      if (HONOR_NANS (GET_MODE (XEXP (x, 0)))
          || HONOR_NANS (GET_MODE (XEXP (x, 1))))
        return 1;
      break;

    case EQ:
    case NE:
      if (HONOR_SNANS (GET_MODE (x)))
        return 1;
      /* Often comparison is CC mode, so check operand modes.  */
      if (HONOR_SNANS (GET_MODE (XEXP (x, 0)))
          || HONOR_SNANS (GET_MODE (XEXP (x, 1))))
        return 1;
      break;

    case FIX:
      /* Conversion of floating point might trap.  */
      if (flag_trapping_math && HONOR_NANS (GET_MODE (XEXP (x, 0))))
        return 1;
      break;

    case NEG:
    case ABS:
    case SUBREG:
      /* These operations don't trap even with floating point.  */
      break;

    default:
      /* Any floating arithmetic may trap.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (x))
          && flag_trapping_math)
        return 1;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          if (may_trap_p_1 (XEXP (x, i), flags))
            return 1;
        }
      else if (fmt[i] == 'E')
        {
          int j;
          for (j = 0; j < XVECLEN (x, i); j++)
            if (may_trap_p_1 (XVECEXP (x, i, j), flags))
              return 1;
        }
    }
  return 0;
}

/* Return nonzero if evaluating rtx X might cause a trap.  */

int
may_trap_p (rtx x)
{
  return may_trap_p_1 (x, 0);
}

/* Return nonzero if evaluating rtx X might cause a trap, when the expression
   is moved from its current location by some optimization.  */

int
may_trap_after_code_motion_p (rtx x)
{
  return may_trap_p_1 (x, MTP_AFTER_MOVE);
}

/* Same as above, but additionally return nonzero if evaluating rtx X might
   cause a fault.  We define a fault for the purpose of this function as an
   erroneous execution condition that cannot be encountered during the normal
   execution of a valid program; the typical example is an unaligned memory
   access on a strict alignment machine.  The compiler guarantees that it
   doesn't generate code that will fault from a valid program, but this
   guarantee doesn't mean anything for individual instructions.  Consider
   the following example:

      struct S { int d; union { char *cp; int *ip; }; };

      int foo(struct S *s)
      {
        if (s->d == 1)
          return *s->ip;
        else
          return *s->cp;
      }

   on a strict alignment machine.  In a valid program, foo will never be
   invoked on a structure for which d is equal to 1 and the underlying
   unique field of the union is not aligned on a 4-byte boundary, but the
   expression *s->ip might cause a fault if considered individually.

   At the RTL level, potentially problematic expressions will almost always
   satisfy may_trap_p; for example, the above dereference can be emitted as
   (mem:SI (reg:P)) and this expression is may_trap_p for a generic register.
   However, suppose that foo is inlined in a caller that causes s->cp to
   point to a local character variable and guarantees that s->d is not set
   to 1; foo may have been effectively translated into pseudo-RTL as:

      if ((reg:SI) == 1)
        (set (reg:SI) (mem:SI (%fp - 7)))
      else
        (set (reg:QI) (mem:QI (%fp - 7)))

   Now (mem:SI (%fp - 7)) is considered as not may_trap_p since it is a
   memory reference to a stack slot, but it will certainly cause a fault
   on a strict alignment machine.  */

int
may_trap_or_fault_p (rtx x)
{
  return may_trap_p_1 (x, MTP_UNALIGNED_MEMS);
}
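
/* Illustrative sketch (not part of the original file): a code motion pass
   would use the stricter predicate, because MEM_NOTRAP_P is only
   meaningful at the expression's original position; INSN is
   hypothetical.  */
#if 0
  if (may_trap_after_code_motion_p (PATTERN (insn)))
    /* Not safe to hoist INSN out of its current position.  */
    return false;
#endif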

/* Return nonzero if X contains a comparison that is not either EQ or NE,
   i.e., an inequality.  */

int
inequality_comparisons_p (rtx x)
{
  const char *fmt;
  int len, i;
  enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case REG:
    case SCRATCH:
    case PC:
    case CC0:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
      return 0;

    case LT:
    case LTU:
    case GT:
    case GTU:
    case LE:
    case LEU:
    case GE:
    case GEU:
      return 1;

    default:
      break;
    }

  len = GET_RTX_LENGTH (code);
  fmt = GET_RTX_FORMAT (code);

  for (i = 0; i < len; i++)
    {
      if (fmt[i] == 'e')
        {
          if (inequality_comparisons_p (XEXP (x, i)))
            return 1;
        }
      else if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (inequality_comparisons_p (XVECEXP (x, i, j)))
              return 1;
        }
    }

  return 0;
}

/* Replace any occurrence of FROM in X with TO.  The function does
   not descend into CONST_DOUBLE for the replacement.

   Note that copying is not done so X must not be shared unless all copies
   are to be modified.  */

rtx
replace_rtx (rtx x, rtx from, rtx to)
{
  int i, j;
  const char *fmt;

  /* The following prevents loops from occurring when we replace a MEM in
     a CONST_DOUBLE with the same CONST_DOUBLE.  */
  if (x != 0 && GET_CODE (x) == CONST_DOUBLE)
    return x;

  if (x == from)
    return to;

  /* Allow this function to make replacements in EXPR_LISTs.  */
  if (x == 0)
    return 0;

  if (GET_CODE (x) == SUBREG)
    {
      rtx new = replace_rtx (SUBREG_REG (x), from, to);

      if (GET_CODE (new) == CONST_INT)
        {
          x = simplify_subreg (GET_MODE (x), new,
                               GET_MODE (SUBREG_REG (x)),
                               SUBREG_BYTE (x));
          gcc_assert (x);
        }
      else
        SUBREG_REG (x) = new;

      return x;
    }
  else if (GET_CODE (x) == ZERO_EXTEND)
    {
      rtx new = replace_rtx (XEXP (x, 0), from, to);

      if (GET_CODE (new) == CONST_INT)
        {
          x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
                                        new, GET_MODE (XEXP (x, 0)));
          gcc_assert (x);
        }
      else
        XEXP (x, 0) = new;

      return x;
    }

  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        XEXP (x, i) = replace_rtx (XEXP (x, i), from, to);
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
    }

  return x;
}
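
/* Illustrative sketch (not part of the original file): because replace_rtx
   modifies X in place, shared RTL is normally unshared first; PAT, OLD_REG
   and NEW_REG are hypothetical.  */
#if 0
  pat = replace_rtx (copy_rtx (pat), old_reg, new_reg);
#endif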

/* Replace occurrences of the old label in *X with the new one.
   DATA is a REPLACE_LABEL_DATA containing the old and new labels.  */

int
replace_label (rtx *x, void *data)
{
  rtx l = *x;
  rtx old_label = ((replace_label_data *) data)->r1;
  rtx new_label = ((replace_label_data *) data)->r2;
  bool update_label_nuses = ((replace_label_data *) data)->update_label_nuses;

  if (l == NULL_RTX)
    return 0;

  if (GET_CODE (l) == SYMBOL_REF
      && CONSTANT_POOL_ADDRESS_P (l))
    {
      rtx c = get_pool_constant (l);
      if (rtx_referenced_p (old_label, c))
        {
          rtx new_c, new_l;
          replace_label_data *d = (replace_label_data *) data;

          /* Create a copy of constant C; replace the label inside
             but do not update LABEL_NUSES because uses in constant pool
             are not counted.  */
          new_c = copy_rtx (c);
          d->update_label_nuses = false;
          for_each_rtx (&new_c, replace_label, data);
          d->update_label_nuses = update_label_nuses;

          /* Add the new constant NEW_C to constant pool and replace
             the old reference to constant by new reference.  */
          new_l = XEXP (force_const_mem (get_pool_mode (l), new_c), 0);
          *x = replace_rtx (l, l, new_l);
        }
      return 0;
    }

  /* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL
     field.  This is not handled by for_each_rtx because it doesn't
     handle unprinted ('0') fields.  */
  if (JUMP_P (l) && JUMP_LABEL (l) == old_label)
    JUMP_LABEL (l) = new_label;

  if ((GET_CODE (l) == LABEL_REF
       || GET_CODE (l) == INSN_LIST)
      && XEXP (l, 0) == old_label)
    {
      XEXP (l, 0) = new_label;
      if (update_label_nuses)
        {
          ++LABEL_NUSES (new_label);
          --LABEL_NUSES (old_label);
        }
      return 0;
    }

  return 0;
}

/* When *BODY is equal to X or X is directly referenced by *BODY
   return nonzero, thus FOR_EACH_RTX stops traversing and returns nonzero
   too, otherwise FOR_EACH_RTX continues traversing *BODY.  */

static int
rtx_referenced_p_1 (rtx *body, void *x)
{
  rtx y = (rtx) x;

  if (*body == NULL_RTX)
    return y == NULL_RTX;

  /* Return true if a label_ref *BODY refers to label Y.  */
  if (GET_CODE (*body) == LABEL_REF && LABEL_P (y))
    return XEXP (*body, 0) == y;

  /* If *BODY is a reference to a pool constant, traverse the constant.  */
  if (GET_CODE (*body) == SYMBOL_REF
      && CONSTANT_POOL_ADDRESS_P (*body))
    return rtx_referenced_p (y, get_pool_constant (*body));

  /* By default, compare the RTL expressions.  */
  return rtx_equal_p (*body, y);
}

/* Return true if X is referenced in BODY.  */

int
rtx_referenced_p (rtx x, rtx body)
{
  return for_each_rtx (&body, rtx_referenced_p_1, x);
}

/* If INSN is a tablejump, return true and store the label (before the jump
   table) to *LABELP and the jump table to *TABLEP.  LABELP and TABLEP may
   be NULL.  */

bool
tablejump_p (rtx insn, rtx *labelp, rtx *tablep)
{
  rtx label, table;

  if (JUMP_P (insn)
      && (label = JUMP_LABEL (insn)) != NULL_RTX
      && (table = next_active_insn (label)) != NULL_RTX
      && JUMP_P (table)
      && (GET_CODE (PATTERN (table)) == ADDR_VEC
          || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
    {
      if (labelp)
        *labelp = label;
      if (tablep)
        *tablep = table;
      return true;
    }
  return false;
}

/* A subroutine of computed_jump_p, return 1 if X contains a REG or MEM or
   constant that is not in the constant pool and not in the condition
   of an IF_THEN_ELSE.  */

static int
computed_jump_p_1 (rtx x)
{
  enum rtx_code code = GET_CODE (x);
  int i, j;
  const char *fmt;

  switch (code)
    {
    case LABEL_REF:
    case PC:
      return 0;

    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case REG:
      return 1;

    case MEM:
      return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
                && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));

    case IF_THEN_ELSE:
      return (computed_jump_p_1 (XEXP (x, 1))
              || computed_jump_p_1 (XEXP (x, 2)));

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e'
          && computed_jump_p_1 (XEXP (x, i)))
        return 1;

      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (computed_jump_p_1 (XVECEXP (x, i, j)))
            return 1;
    }

  return 0;
}

/* Return nonzero if INSN is an indirect jump (aka computed jump).

   Tablejumps and casesi insns are not considered indirect jumps;
   we can recognize them by a (use (label_ref)).  */

int
computed_jump_p (rtx insn)
{
  int i;
  if (JUMP_P (insn))
    {
      rtx pat = PATTERN (insn);

      if (find_reg_note (insn, REG_LABEL, NULL_RTX))
        return 0;
      else if (GET_CODE (pat) == PARALLEL)
        {
          int len = XVECLEN (pat, 0);
          int has_use_labelref = 0;

          for (i = len - 1; i >= 0; i--)
            if (GET_CODE (XVECEXP (pat, 0, i)) == USE
                && (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
                    == LABEL_REF))
              has_use_labelref = 1;

          if (! has_use_labelref)
            for (i = len - 1; i >= 0; i--)
              if (GET_CODE (XVECEXP (pat, 0, i)) == SET
                  && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
                  && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))
                return 1;
        }
      else if (GET_CODE (pat) == SET
               && SET_DEST (pat) == pc_rtx
               && computed_jump_p_1 (SET_SRC (pat)))
        return 1;
    }
  return 0;
}

/* Optimized loop of for_each_rtx, trying to avoid useless recursive
   calls.  Processes the subexpressions of EXP and passes them to F.  */
static int
for_each_rtx_1 (rtx exp, int n, rtx_function f, void *data)
{
  int result, i, j;
  const char *format = GET_RTX_FORMAT (GET_CODE (exp));
  rtx *x;

  for (; format[n] != '\0'; n++)
    {
      switch (format[n])
        {
        case 'e':
          /* Call F on X.  */
          x = &XEXP (exp, n);
          result = (*f) (x, data);
          if (result == -1)
            /* Do not traverse sub-expressions.  */
            continue;
          else if (result != 0)
            /* Stop the traversal.  */
            return result;

          if (*x == NULL_RTX)
            /* There are no sub-expressions.  */
            continue;

          i = non_rtx_starting_operands[GET_CODE (*x)];
          if (i >= 0)
            {
              result = for_each_rtx_1 (*x, i, f, data);
              if (result != 0)
                return result;
            }
          break;

        case 'V':
        case 'E':
          if (XVEC (exp, n) == 0)
            continue;
          for (j = 0; j < XVECLEN (exp, n); ++j)
            {
              /* Call F on X.  */
              x = &XVECEXP (exp, n, j);
              result = (*f) (x, data);
              if (result == -1)
                /* Do not traverse sub-expressions.  */
                continue;
              else if (result != 0)
                /* Stop the traversal.  */
                return result;

              if (*x == NULL_RTX)
                /* There are no sub-expressions.  */
                continue;

              i = non_rtx_starting_operands[GET_CODE (*x)];
              if (i >= 0)
                {
                  result = for_each_rtx_1 (*x, i, f, data);
                  if (result != 0)
                    return result;
                }
            }
          break;

        default:
          /* Nothing to do.  */
          break;
        }
    }

  return 0;
}

/* Traverse X via depth-first search, calling F for each
   sub-expression (including X itself).  F is also passed the DATA.
   If F returns -1, do not traverse sub-expressions, but continue
   traversing the rest of the tree.  If F ever returns any other
   nonzero value, stop the traversal, and return the value returned
   by F.  Otherwise, return 0.  This function does not traverse inside
   tree structure that contains RTX_EXPRs, or into sub-expressions
   whose format code is `0' since it is not known whether or not those
   codes are actually RTL.

   This routine is very general, and could (should?) be used to
   implement many of the other routines in this file.  */

int
for_each_rtx (rtx *x, rtx_function f, void *data)
{
  int result;
  int i;

  /* Call F on X.  */
  result = (*f) (x, data);
  if (result == -1)
    /* Do not traverse sub-expressions.  */
    return 0;
  else if (result != 0)
    /* Stop the traversal.  */
    return result;

  if (*x == NULL_RTX)
    /* There are no sub-expressions.  */
    return 0;

  i = non_rtx_starting_operands[GET_CODE (*x)];
  if (i < 0)
    return 0;

  return for_each_rtx_1 (*x, i, f, data);
}
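
/* Illustrative sketch (not part of the original file): counting outermost
   MEMs with for_each_rtx.  Returning -1 from the callback skips the MEM's
   address, so nested memory references are not counted.  The helper names
   are hypothetical.  */
#if 0
static int
count_mems_1 (rtx *x, void *data)
{
  if (*x != NULL_RTX && MEM_P (*x))
    {
      ++*(int *) data;
      return -1;        /* Do not traverse inside this MEM.  */
    }
  return 0;             /* Continue the traversal.  */
}

static int
count_outer_mems (rtx x)
{
  int n = 0;
  for_each_rtx (&x, count_mems_1, &n);
  return n;
}
#endif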


/* Searches X for any reference to REGNO, returning the rtx of the
   reference found if any.  Otherwise, returns NULL_RTX.  */

rtx
regno_use_in (unsigned int regno, rtx x)
{
  const char *fmt;
  int i, j;
  rtx tem;

  if (REG_P (x) && REGNO (x) == regno)
    return x;

  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          if ((tem = regno_use_in (regno, XEXP (x, i))))
            return tem;
        }
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if ((tem = regno_use_in (regno, XVECEXP (x, i, j))))
            return tem;
    }

  return NULL_RTX;
}

/* Return a value indicating whether OP, an operand of a commutative
   operation, is preferred as the first or second operand.  The higher
   the value, the stronger the preference for being the first operand;
   negative values indicate a preference for being the second operand.  */

int
commutative_operand_precedence (rtx op)
{
  enum rtx_code code = GET_CODE (op);

  /* Constants always become the second operand.  Prefer "nice" constants.  */
  if (code == CONST_INT)
    return -7;
  if (code == CONST_DOUBLE)
    return -6;
  op = avoid_constant_pool_reference (op);
  code = GET_CODE (op);

  switch (GET_RTX_CLASS (code))
    {
    case RTX_CONST_OBJ:
      if (code == CONST_INT)
        return -5;
      if (code == CONST_DOUBLE)
        return -4;
      return -3;

    case RTX_EXTRA:
      /* SUBREGs of objects should come second.  */
      if (code == SUBREG && OBJECT_P (SUBREG_REG (op)))
        return -2;

      if (!CONSTANT_P (op))
        return 0;
      else
        /* As for RTX_CONST_OBJ.  */
        return -3;

    case RTX_OBJ:
      /* Complex expressions should be the first, so decrease priority
         of objects.  */
      return -1;

    case RTX_COMM_ARITH:
      /* Prefer operands that are themselves commutative to be first.
         This helps to make things linear.  In particular,
         (and (and (reg) (reg)) (not (reg))) is canonical.  */
      return 4;

    case RTX_BIN_ARITH:
      /* If only one operand is a binary expression, it will be the first
         operand.  In particular,  (plus (minus (reg) (reg)) (neg (reg)))
         is canonical, although it will usually be further simplified.  */
      return 2;

    case RTX_UNARY:
      /* Then prefer NEG and NOT.  */
      if (code == NEG || code == NOT)
        return 1;

    default:
      return 0;
    }
}

/* Return 1 iff it is necessary to swap operands of commutative operation
   in order to canonicalize expression.  */

int
swap_commutative_operands_p (rtx x, rtx y)
{
  return (commutative_operand_precedence (x)
          < commutative_operand_precedence (y));
}
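
/* Illustrative sketch (not part of the original file): the canonical
   operand-swapping idiom built on swap_commutative_operands_p, as used
   before constructing a commutative operation; OP0 and OP1 are
   hypothetical locals.  */
#if 0
  if (swap_commutative_operands_p (op0, op1))
    {
      rtx tem = op0;
      op0 = op1;
      op1 = tem;
    }
#endif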

/* Return 1 if X is an autoincrement side effect and the register is
   not the stack pointer.  */
int
auto_inc_p (rtx x)
{
  switch (GET_CODE (x))
    {
    case PRE_INC:
    case POST_INC:
    case PRE_DEC:
    case POST_DEC:
    case PRE_MODIFY:
    case POST_MODIFY:
      /* There are no REG_INC notes for SP.  */
      if (XEXP (x, 0) != stack_pointer_rtx)
        return 1;
    default:
      break;
    }
  return 0;
}

/* Return nonzero if IN contains a piece of rtl that has the address LOC.  */
int
loc_mentioned_in_p (rtx *loc, rtx in)
{
  enum rtx_code code = GET_CODE (in);
  const char *fmt = GET_RTX_FORMAT (code);
  int i, j;

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (loc == &in->u.fld[i].rt_rtx)
        return 1;
      if (fmt[i] == 'e')
        {
          if (loc_mentioned_in_p (loc, XEXP (in, i)))
            return 1;
        }
      else if (fmt[i] == 'E')
        for (j = XVECLEN (in, i) - 1; j >= 0; j--)
          if (loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
            return 1;
    }
  return 0;
}

/* Helper function for subreg_lsb.  Given a subreg's OUTER_MODE, INNER_MODE,
   and SUBREG_BYTE, return the bit offset where the subreg begins
   (counting from the least significant bit of the operand).  */

unsigned int
subreg_lsb_1 (enum machine_mode outer_mode,
              enum machine_mode inner_mode,
              unsigned int subreg_byte)
{
  unsigned int bitpos;
  unsigned int byte;
  unsigned int word;

  /* A paradoxical subreg begins at bit position 0.  */
  if (GET_MODE_BITSIZE (outer_mode) > GET_MODE_BITSIZE (inner_mode))
    return 0;

  if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
    /* If the subreg crosses a word boundary ensure that
       it also begins and ends on a word boundary.  */
    gcc_assert (!((subreg_byte % UNITS_PER_WORD
                  + GET_MODE_SIZE (outer_mode)) > UNITS_PER_WORD
                  && (subreg_byte % UNITS_PER_WORD
                      || GET_MODE_SIZE (outer_mode) % UNITS_PER_WORD)));

  if (WORDS_BIG_ENDIAN)
    word = (GET_MODE_SIZE (inner_mode)
            - (subreg_byte + GET_MODE_SIZE (outer_mode))) / UNITS_PER_WORD;
  else
    word = subreg_byte / UNITS_PER_WORD;
  bitpos = word * BITS_PER_WORD;

  if (BYTES_BIG_ENDIAN)
    byte = (GET_MODE_SIZE (inner_mode)
            - (subreg_byte + GET_MODE_SIZE (outer_mode))) % UNITS_PER_WORD;
  else
    byte = subreg_byte % UNITS_PER_WORD;
  bitpos += byte * BITS_PER_UNIT;

  return bitpos;
}

/* Given a subreg X, return the bit offset where the subreg begins
   (counting from the least significant bit of the reg).  */

unsigned int
subreg_lsb (rtx x)
{
  return subreg_lsb_1 (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
                       SUBREG_BYTE (x));
}
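
/* Illustrative worked example (not part of the original file), assuming
   4-byte words and matching byte/word endianness: for
   (subreg:SI (reg:DI) 4), the subreg starts at bit 32 on a little-endian
   target, but byte offset 4 selects the least significant word (bit 0)
   on a big-endian one.  */
#if 0
  gcc_assert (subreg_lsb_1 (SImode, DImode, 4)
              == (unsigned int) (BYTES_BIG_ENDIAN ? 0 : 32));
#endif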

/* This function returns the regno offset of a subreg expression.
   xregno - A regno of an inner hard subreg_reg (or what will become one).
   xmode  - The mode of xregno.
   offset - The byte offset.
   ymode  - The mode of a top level SUBREG (or what may become one).
   RETURN - The regno offset which would be used.  */
unsigned int
subreg_regno_offset (unsigned int xregno, enum machine_mode xmode,
                     unsigned int offset, enum machine_mode ymode)
{
  int nregs_xmode, nregs_ymode;
  int mode_multiple, nregs_multiple;
  int y_offset;

  gcc_assert (xregno < FIRST_PSEUDO_REGISTER);

  /* Adjust nregs_xmode to allow for 'holes'.  */
  if (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode))
    nregs_xmode = HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode);
  else
    nregs_xmode = hard_regno_nregs[xregno][xmode];

  nregs_ymode = hard_regno_nregs[xregno][ymode];

  /* If this is a big endian paradoxical subreg, which uses more actual
     hard registers than the original register, we must return a negative
     offset so that we find the proper highpart of the register.  */
  if (offset == 0
      && nregs_ymode > nregs_xmode
      && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
          ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
    return nregs_xmode - nregs_ymode;

  if (offset == 0 || nregs_xmode == nregs_ymode)
    return 0;

  /* Size of ymode must not be greater than the size of xmode.  */
  mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
  gcc_assert (mode_multiple != 0);

  y_offset = offset / GET_MODE_SIZE (ymode);
  nregs_multiple = nregs_xmode / nregs_ymode;
  return (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
}

/* This function returns true when the offset is representable via
   subreg_offset in the given regno.
   xregno - A regno of an inner hard subreg_reg (or what will become one).
   xmode  - The mode of xregno.
   offset - The byte offset.
   ymode  - The mode of a top level SUBREG (or what may become one).
   RETURN - Whether the offset is representable.  */
bool
subreg_offset_representable_p (unsigned int xregno, enum machine_mode xmode,
                               unsigned int offset, enum machine_mode ymode)
{
  int nregs_xmode, nregs_ymode;
  int mode_multiple, nregs_multiple;
  int y_offset;
  int regsize_xmode, regsize_ymode;

  gcc_assert (xregno < FIRST_PSEUDO_REGISTER);

  /* If there are holes in a non-scalar mode in registers, we expect
     that it is made up of its units concatenated together.  */
  if (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode))
    {
      enum machine_mode xmode_unit;

      nregs_xmode = HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode);
      if (GET_MODE_INNER (xmode) == VOIDmode)
        xmode_unit = xmode;
      else
        xmode_unit = GET_MODE_INNER (xmode);
      gcc_assert (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode_unit));
      gcc_assert (nregs_xmode
                  == (GET_MODE_NUNITS (xmode)
                      * HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode_unit)));
      gcc_assert (hard_regno_nregs[xregno][xmode]
                  == (hard_regno_nregs[xregno][xmode_unit]
                      * GET_MODE_NUNITS (xmode)));

      /* You can only ask for a SUBREG of a value with holes in the middle
         if you don't cross the holes.  (Such a SUBREG should be done by
         picking a different register class, or doing it in memory if
         necessary.)  An example of a value with holes is XCmode on 32-bit
         x86 with -m128bit-long-double; it's represented in 6 32-bit registers,
         3 for each part, but in memory it's two 128-bit parts.
         Padding is assumed to be at the end (not necessarily the 'high part')
         of each unit.  */
      if ((offset / GET_MODE_SIZE (xmode_unit) + 1
           < GET_MODE_NUNITS (xmode))
          && (offset / GET_MODE_SIZE (xmode_unit)
              != ((offset + GET_MODE_SIZE (ymode) - 1)
                  / GET_MODE_SIZE (xmode_unit))))
        return false;
    }
  else
    nregs_xmode = hard_regno_nregs[xregno][xmode];

  nregs_ymode = hard_regno_nregs[xregno][ymode];

  /* Paradoxical subregs are otherwise valid.  */
  if (offset == 0
      && nregs_ymode > nregs_xmode
      && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
          ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
    return true;

  /* If registers store different numbers of bits in the different
     modes, we cannot generally form this subreg.  */
  regsize_xmode = GET_MODE_SIZE (xmode) / nregs_xmode;
  regsize_ymode = GET_MODE_SIZE (ymode) / nregs_ymode;
  if (regsize_xmode > regsize_ymode && nregs_ymode > 1)
    return false;
  if (regsize_ymode > regsize_xmode && nregs_xmode > 1)
    return false;

  /* Lowpart subregs are otherwise valid.  */
  if (offset == subreg_lowpart_offset (ymode, xmode))
    return true;

  /* This should always pass, otherwise we don't know how to verify
     the constraint.  These conditions may be relaxed but
     subreg_regno_offset would need to be redesigned.  */
  gcc_assert ((GET_MODE_SIZE (xmode) % GET_MODE_SIZE (ymode)) == 0);
  gcc_assert ((nregs_xmode % nregs_ymode) == 0);

  /* The XMODE value can be seen as a vector of NREGS_XMODE
     values.  The subreg must represent a lowpart of given field.
     Compute what field it is.  */
  offset -= subreg_lowpart_offset (ymode,
                                   mode_for_size (GET_MODE_BITSIZE (xmode)
                                                  / nregs_xmode,
                                                  MODE_INT, 0));

  /* Size of ymode must not be greater than the size of xmode.  */
  mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
  gcc_assert (mode_multiple != 0);

  y_offset = offset / GET_MODE_SIZE (ymode);
  nregs_multiple = nregs_xmode / nregs_ymode;

  gcc_assert ((offset % GET_MODE_SIZE (ymode)) == 0);
  gcc_assert ((mode_multiple % nregs_multiple) == 0);

  return (!(y_offset % (mode_multiple / nregs_multiple)));
}

/* Return the final regno that a subreg expression refers to.  */
unsigned int
subreg_regno (rtx x)
{
  unsigned int ret;
  rtx subreg = SUBREG_REG (x);
  int regno = REGNO (subreg);

  ret = regno + subreg_regno_offset (regno,
                                     GET_MODE (subreg),
                                     SUBREG_BYTE (x),
                                     GET_MODE (x));
  return ret;
}

struct parms_set_data
{
  int nregs;
  HARD_REG_SET regs;
};

/* Helper function for noticing stores to parameter registers.  */
static void
parms_set (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
{
  struct parms_set_data *d = data;
  if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
      && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
    {
      CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
      d->nregs--;
    }
}

/* Look backward for the first parameter to be loaded.
   Note that loads of all parameters will not necessarily be
   found if CSE has eliminated some of them (e.g., an argument
   to the outer function is passed down as a parameter).
   Do not skip BOUNDARY.  */
rtx
find_first_parameter_load (rtx call_insn, rtx boundary)
{
  struct parms_set_data parm;
  rtx p, before, first_set;

  /* Since different machines initialize their parameter registers
     in different orders, assume nothing.  Collect the set of all
     parameter registers.  */
  CLEAR_HARD_REG_SET (parm.regs);
  parm.nregs = 0;
  for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
    if (GET_CODE (XEXP (p, 0)) == USE
        && REG_P (XEXP (XEXP (p, 0), 0)))
      {
        gcc_assert (REGNO (XEXP (XEXP (p, 0), 0)) < FIRST_PSEUDO_REGISTER);

        /* We only care about registers which can hold function
           arguments.  */
        if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
          continue;

        SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
        parm.nregs++;
      }
  before = call_insn;
  first_set = call_insn;

  /* Search backward for the first set of a register in this set.  */
  while (parm.nregs && before != boundary)
    {
      before = PREV_INSN (before);

      /* It is possible that some loads got CSEed from one call to
         another.  Stop in that case.  */
      if (CALL_P (before))
        break;

      /* Our caller must either ensure that we will find all sets
         (in case the code has not been optimized yet), or guard
         against possible labels by setting BOUNDARY to the preceding
         CODE_LABEL.  */
      if (LABEL_P (before))
        {
          gcc_assert (before == boundary);
          break;
        }

      if (INSN_P (before))
        {
          int nregs_old = parm.nregs;
          note_stores (PATTERN (before), parms_set, &parm);
          /* If we found something that did not set a parameter reg,
             we're done.  Do not keep going, as that might result
             in hoisting an insn before the setting of a pseudo
             that is used by the hoisted insn.  */
          if (nregs_old != parm.nregs)
            first_set = before;
          else
            break;
        }
    }
  return first_set;
}

/* Return true if we should avoid inserting code between INSN and the
   preceding call instruction.  */

bool
keep_with_call_p (rtx insn)
{
  rtx set;

  if (INSN_P (insn) && (set = single_set (insn)) != NULL)
    {
      if (REG_P (SET_DEST (set))
          && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
          && fixed_regs[REGNO (SET_DEST (set))]
          && general_operand (SET_SRC (set), VOIDmode))
        return true;
      if (REG_P (SET_SRC (set))
          && FUNCTION_VALUE_REGNO_P (REGNO (SET_SRC (set)))
          && REG_P (SET_DEST (set))
          && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
        return true;
      /* There may be a stack pop just after the call and before the store
         of the return register.  Search for the actual store when deciding
         if we can break or not.  */
      if (SET_DEST (set) == stack_pointer_rtx)
        {
          rtx i2 = next_nonnote_insn (insn);
          if (i2 && keep_with_call_p (i2))
            return true;
        }
    }
  return false;
}

/* Return true if LABEL is a target of JUMP_INSN.  This applies only
   to non-complex jumps.  That is, direct unconditional, conditional,
   and tablejumps, but not computed jumps or returns.  It also does
   not apply to the fallthru case of a conditional jump.  */

bool
label_is_jump_target_p (rtx label, rtx jump_insn)
{
  rtx tmp = JUMP_LABEL (jump_insn);

  if (label == tmp)
    return true;

  if (tablejump_p (jump_insn, NULL, &tmp))
    {
      rtvec vec = XVEC (PATTERN (tmp),
                        GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC);
      int i, veclen = GET_NUM_ELEM (vec);

      for (i = 0; i < veclen; ++i)
        if (XEXP (RTVEC_ELT (vec, i), 0) == label)
          return true;
    }

  return false;
}


/* Return an estimate of the cost of computing rtx X.
   One use is in cse, to decide which expression to keep in the hash table.
   Another is in rtl generation, to pick the cheapest way to multiply.
   Other uses like the latter are expected in the future.  */

int
rtx_cost (rtx x, enum rtx_code outer_code ATTRIBUTE_UNUSED)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;
  int total;

  if (x == 0)
    return 0;

  /* Compute the default costs of certain things.
     Note that targetm.rtx_costs can override the defaults.  */

  code = GET_CODE (x);
  switch (code)
    {
    case MULT:
      total = COSTS_N_INSNS (5);
      break;
    case DIV:
    case UDIV:
    case MOD:
    case UMOD:
      total = COSTS_N_INSNS (7);
      break;
    case USE:
      /* Used in combine.c as a marker.  */
      total = 0;
      break;
    default:
      total = COSTS_N_INSNS (1);
    }

  switch (code)
    {
    case REG:
      return 0;

    case SUBREG:
      total = 0;
      /* If we can't tie these modes, make this expensive.  The larger
         the mode, the more expensive it is.  */
      if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
        return COSTS_N_INSNS (2
                              + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
      break;

    default:
      if (targetm.rtx_costs (x, code, outer_code, &total))
        return total;
      break;
    }

  /* Sum the costs of the sub-rtx's, plus cost of this operation,
     which is already in total.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      total += rtx_cost (XEXP (x, i), code);
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
        total += rtx_cost (XVECEXP (x, i, j), code);

  return total;
}
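
/* Illustrative sketch (not part of the original file): the classic
   CSE-style comparison that keeps the cheaper of two equivalent
   expressions; X and Y are hypothetical.  */
#if 0
  if (rtx_cost (x, SET) > rtx_cost (y, SET))
    x = y;
#endif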

/* Return the cost of address expression X.
   X is expected to be a properly formed address reference.  */

int
address_cost (rtx x, enum machine_mode mode)
{
  /* We may be asked for the cost of various unusual addresses, such as the
     operands of a push instruction.  It is not worthwhile to complicate
     the target hook for such cases.  */

  if (!memory_address_p (mode, x))
    return 1000;

  return targetm.address_cost (x);
}

/* If the target doesn't override, compute the cost as with arithmetic.  */

int
default_address_cost (rtx x)
{
  return rtx_cost (x, MEM);
}


unsigned HOST_WIDE_INT
nonzero_bits (rtx x, enum machine_mode mode)
{
  return cached_nonzero_bits (x, mode, NULL_RTX, VOIDmode, 0);
}

unsigned int
num_sign_bit_copies (rtx x, enum machine_mode mode)
{
  return cached_num_sign_bit_copies (x, mode, NULL_RTX, VOIDmode, 0);
}
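
/* Illustrative sketch (not part of the original file): a narrowing
   optimization might use nonzero_bits to prove that a SImode value
   already fits in an unsigned byte; X is hypothetical.  */
#if 0
  if ((nonzero_bits (x, SImode) & ~(unsigned HOST_WIDE_INT) 0xff) == 0)
    /* All bits above the low eight are known to be zero, so the value
       can safely be treated as a QImode quantity.  */
    x = gen_lowpart (QImode, x);
#endif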

/* The function cached_nonzero_bits is a wrapper around nonzero_bits1.
   It avoids exponential behavior in nonzero_bits1 when X has
   identical subexpressions on the first or the second level.  */

static unsigned HOST_WIDE_INT
cached_nonzero_bits (rtx x, enum machine_mode mode, rtx known_x,
                     enum machine_mode known_mode,
                     unsigned HOST_WIDE_INT known_ret)
{
  if (x == known_x && mode == known_mode)
    return known_ret;

  /* Try to find identical subexpressions.  If found call
     nonzero_bits1 on X with the subexpressions as KNOWN_X and the
     precomputed value for the subexpression as KNOWN_RET.  */

  if (ARITHMETIC_P (x))
    {
      rtx x0 = XEXP (x, 0);
      rtx x1 = XEXP (x, 1);

      /* Check the first level.  */
      if (x0 == x1)
        return nonzero_bits1 (x, mode, x0, mode,
                              cached_nonzero_bits (x0, mode, known_x,
                                                   known_mode, known_ret));

      /* Check the second level.  */
      if (ARITHMETIC_P (x0)
          && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
        return nonzero_bits1 (x, mode, x1, mode,
                              cached_nonzero_bits (x1, mode, known_x,
                                                   known_mode, known_ret));

      if (ARITHMETIC_P (x1)
          && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
        return nonzero_bits1 (x, mode, x0, mode,
                              cached_nonzero_bits (x0, mode, known_x,
                                                   known_mode, known_ret));
    }

  return nonzero_bits1 (x, mode, known_x, known_mode, known_ret);
}

/* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
   We don't let nonzero_bits recur into num_sign_bit_copies, because that
   is less useful.  We can't allow both, because that results in exponential
   run time recursion.  There is a nullstone testcase that triggered
   this.  This macro avoids accidental uses of num_sign_bit_copies.  */
#define cached_num_sign_bit_copies sorry_i_am_preventing_exponential_behavior

/* Given an expression, X, compute which bits in X can be nonzero.
   We don't care about bits outside of those defined in MODE.

   For most X this is simply GET_MODE_MASK (MODE), but if X is
   an arithmetic operation, we can do better.  */

static unsigned HOST_WIDE_INT
nonzero_bits1 (rtx x, enum machine_mode mode, rtx known_x,
               enum machine_mode known_mode,
               unsigned HOST_WIDE_INT known_ret)
{
  unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
  unsigned HOST_WIDE_INT inner_nz;
  enum rtx_code code;
  unsigned int mode_width = GET_MODE_BITSIZE (mode);

  /* For floating-point values, assume all bits are needed.  */
  if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode))
    return nonzero;

  /* If X is wider than MODE, use its mode instead.  */
  if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
    {
      mode = GET_MODE (x);
      nonzero = GET_MODE_MASK (mode);
      mode_width = GET_MODE_BITSIZE (mode);
    }

  if (mode_width > HOST_BITS_PER_WIDE_INT)
    /* Our only callers in this case look for single bit values.  So
       just return the mode mask.  Those tests will then be false.  */
    return nonzero;

#ifndef WORD_REGISTER_OPERATIONS
  /* If MODE is wider than X, but both are a single word for both the host
     and target machines, we can compute this from which bits of the
     object might be nonzero in its own mode, taking into account the fact
     that on many CISC machines, accessing an object in a wider mode
     causes the high-order bits to become undefined.  So they are
     not known to be zero.  */

  if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
      && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
      && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
    {
      nonzero &= cached_nonzero_bits (x, GET_MODE (x),
                                      known_x, known_mode, known_ret);
      nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x));
      return nonzero;
    }
#endif

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
      /* If pointers extend unsigned and this is a pointer in Pmode, say that
         all the bits above ptr_mode are known to be zero.  */
      if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
          && REG_POINTER (x))
        nonzero &= GET_MODE_MASK (ptr_mode);
#endif

      /* Include declared information about alignment of pointers.  */
      /* ??? We don't properly preserve REG_POINTER changes across
         pointer-to-integer casts, so we can't trust it except for
         things that we know must be pointers.  See execute/960116-1.c.  */
      if ((x == stack_pointer_rtx
           || x == frame_pointer_rtx
           || x == arg_pointer_rtx)
          && REGNO_POINTER_ALIGN (REGNO (x)))
        {
          unsigned HOST_WIDE_INT alignment
            = REGNO_POINTER_ALIGN (REGNO (x)) / BITS_PER_UNIT;

#ifdef PUSH_ROUNDING
          /* If PUSH_ROUNDING is defined, it is possible for the
             stack to be momentarily aligned only to that amount,
             so we pick the least alignment.  */
          if (x == stack_pointer_rtx && PUSH_ARGS)
            alignment = MIN ((unsigned HOST_WIDE_INT) PUSH_ROUNDING (1),
                             alignment);
#endif

          nonzero &= ~(alignment - 1);
        }

      {
        unsigned HOST_WIDE_INT nonzero_for_hook = nonzero;
        rtx new = rtl_hooks.reg_nonzero_bits (x, mode, known_x,
                                              known_mode, known_ret,
                                              &nonzero_for_hook);

        if (new)
          nonzero_for_hook &= cached_nonzero_bits (new, mode, known_x,
                                                   known_mode, known_ret);

        return nonzero_for_hook;
      }

    case CONST_INT:
#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
      /* If X is negative in MODE, sign-extend the value.  */
      if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
          && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
        return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
#endif

      return INTVAL (x);

    case MEM:
#ifdef LOAD_EXTEND_OP
      /* In many, if not most, RISC machines, reading a byte from memory
         zeros the rest of the register.  Noticing that fact saves a lot
         of extra zero-extends.  */
      if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
        nonzero &= GET_MODE_MASK (GET_MODE (x));
#endif
      break;

    case EQ:  case NE:
    case UNEQ:  case LTGT:
    case GT:  case GTU:  case UNGT:
    case LT:  case LTU:  case UNLT:
    case GE:  case GEU:  case UNGE:
    case LE:  case LEU:  case UNLE:
    case UNORDERED: case ORDERED:
      /* If this produces an integer result, we know which bits are set.
         Code here used to clear bits outside the mode of X, but that is
         now done above.  */
      /* Mind that MODE is the mode the caller wants to look at this
         operation in, and not the actual operation mode.  We can wind
         up with (subreg:DI (gt:V4HI x y)), and we don't have anything
         that describes the results of a vector compare.  */
      if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
          && mode_width <= HOST_BITS_PER_WIDE_INT)
        nonzero = STORE_FLAG_VALUE;
      break;

    case NEG:
#if 0
      /* Disabled to avoid exponential mutual recursion between nonzero_bits
         and num_sign_bit_copies.  */
      if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
          == GET_MODE_BITSIZE (GET_MODE (x)))
        nonzero = 1;
#endif

      if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
        nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x)));
      break;

    case ABS:
#if 0
      /* Disabled to avoid exponential mutual recursion between nonzero_bits
         and num_sign_bit_copies.  */
      if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
          == GET_MODE_BITSIZE (GET_MODE (x)))
        nonzero = 1;
#endif
      break;

    case TRUNCATE:
      nonzero &= (cached_nonzero_bits (XEXP (x, 0), mode,
                                       known_x, known_mode, known_ret)
                  & GET_MODE_MASK (mode));
      break;

    case ZERO_EXTEND:
      nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
                                      known_x, known_mode, known_ret);
      if (GET_MODE (XEXP (x, 0)) != VOIDmode)
        nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
      break;

    case SIGN_EXTEND:
      /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
3566
         Otherwise, show all the bits in the outer mode but not the inner
3567
         may be nonzero.  */
3568
      inner_nz = cached_nonzero_bits (XEXP (x, 0), mode,
3569
                                      known_x, known_mode, known_ret);
3570
      if (GET_MODE (XEXP (x, 0)) != VOIDmode)
3571
        {
3572
          inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
3573
          if (inner_nz
3574
              & (((HOST_WIDE_INT) 1
3575
                  << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
3576
            inner_nz |= (GET_MODE_MASK (mode)
3577
                         & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
3578
        }
3579
 
3580
      nonzero &= inner_nz;
3581
      break;
3582
 
3583
    case AND:
3584
      nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
3585
                                       known_x, known_mode, known_ret)
3586
                 & cached_nonzero_bits (XEXP (x, 1), mode,
3587
                                        known_x, known_mode, known_ret);
3588
      break;
3589
 
3590
    case XOR:   case IOR:
3591
    case UMIN:  case UMAX:  case SMIN:  case SMAX:
3592
      {
3593
        unsigned HOST_WIDE_INT nonzero0 =
3594
          cached_nonzero_bits (XEXP (x, 0), mode,
3595
                               known_x, known_mode, known_ret);
3596
 
3597
        /* Don't call nonzero_bits for the second time if it cannot change
3598
           anything.  */
3599
        if ((nonzero & nonzero0) != nonzero)
3600
          nonzero &= nonzero0
3601
                     | cached_nonzero_bits (XEXP (x, 1), mode,
3602
                                            known_x, known_mode, known_ret);
3603
      }
3604
      break;
3605
 
3606
    case PLUS:  case MINUS:
3607
    case MULT:
3608
    case DIV:   case UDIV:
3609
    case MOD:   case UMOD:
3610
      /* We can apply the rules of arithmetic to compute the number of
3611
         high- and low-order zero bits of these operations.  We start by
3612
         computing the width (position of the highest-order nonzero bit)
3613
         and the number of low-order zero bits for each value.  */
3614
      {
3615
        unsigned HOST_WIDE_INT nz0 =
3616
          cached_nonzero_bits (XEXP (x, 0), mode,
3617
                               known_x, known_mode, known_ret);
3618
        unsigned HOST_WIDE_INT nz1 =
3619
          cached_nonzero_bits (XEXP (x, 1), mode,
3620
                               known_x, known_mode, known_ret);
3621
        int sign_index = GET_MODE_BITSIZE (GET_MODE (x)) - 1;
3622
        int width0 = floor_log2 (nz0) + 1;
3623
        int width1 = floor_log2 (nz1) + 1;
3624
        int low0 = floor_log2 (nz0 & -nz0);
3625
        int low1 = floor_log2 (nz1 & -nz1);
3626
        HOST_WIDE_INT op0_maybe_minusp
3627
          = (nz0 & ((HOST_WIDE_INT) 1 << sign_index));
3628
        HOST_WIDE_INT op1_maybe_minusp
3629
          = (nz1 & ((HOST_WIDE_INT) 1 << sign_index));
3630
        unsigned int result_width = mode_width;
3631
        int result_low = 0;
3632
 
3633
        switch (code)
3634
          {
3635
          case PLUS:
3636
            result_width = MAX (width0, width1) + 1;
3637
            result_low = MIN (low0, low1);
3638
            break;
3639
          case MINUS:
3640
            result_low = MIN (low0, low1);
3641
            break;
3642
          case MULT:
3643
            result_width = width0 + width1;
3644
            result_low = low0 + low1;
3645
            break;
3646
          case DIV:
3647
            if (width1 == 0)
3648
              break;
3649
            if (! op0_maybe_minusp && ! op1_maybe_minusp)
3650
              result_width = width0;
3651
            break;
3652
          case UDIV:
3653
            if (width1 == 0)
3654
              break;
3655
            result_width = width0;
3656
            break;
3657
          case MOD:
3658
            if (width1 == 0)
3659
              break;
3660
            if (! op0_maybe_minusp && ! op1_maybe_minusp)
3661
              result_width = MIN (width0, width1);
3662
            result_low = MIN (low0, low1);
3663
            break;
3664
          case UMOD:
3665
            if (width1 == 0)
3666
              break;
3667
            result_width = MIN (width0, width1);
3668
            result_low = MIN (low0, low1);
3669
            break;
3670
          default:
3671
            gcc_unreachable ();
3672
          }
3673
 
3674
        if (result_width < mode_width)
3675
          nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
3676
 
3677
        if (result_low > 0)
3678
          nonzero &= ~(((HOST_WIDE_INT) 1 << result_low) - 1);
3679
 
3680
#ifdef POINTERS_EXTEND_UNSIGNED
3681
        /* If pointers extend unsigned and this is an addition or subtraction
3682
           to a pointer in Pmode, all the bits above ptr_mode are known to be
3683
           zero.  */
3684
        if (POINTERS_EXTEND_UNSIGNED > 0 && GET_MODE (x) == Pmode
3685
            && (code == PLUS || code == MINUS)
3686
            && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
3687
          nonzero &= GET_MODE_MASK (ptr_mode);
3688
#endif
3689
      }
3690
      break;
3691
 
3692
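      /* Worked example (illustrative, not part of the original source):
         for (plus:SI X Y) with nonzero_bits (X) == 0xff and
         nonzero_bits (Y) == 0x0c, we get width0 = 8, width1 = 4,
         low0 = 0 and low1 = 2, so result_width = MAX (8, 4) + 1 = 9 and
         result_low = MIN (0, 2) = 0, narrowing the mask to
         nonzero &= 0x1ff.  */
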
    case ZERO_EXTRACT:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
        nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
      break;

    case SUBREG:
      /* If this is a SUBREG formed for a promoted variable that has
         been zero-extended, we know that at least the high-order bits
         are zero, though others might be too.  */

      if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x) > 0)
        nonzero = GET_MODE_MASK (GET_MODE (x))
                  & cached_nonzero_bits (SUBREG_REG (x), GET_MODE (x),
                                         known_x, known_mode, known_ret);

      /* If the inner mode is a single word for both the host and target
         machines, we can compute this from which bits of the inner
         object might be nonzero.  */
      if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
          && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
              <= HOST_BITS_PER_WIDE_INT))
        {
          nonzero &= cached_nonzero_bits (SUBREG_REG (x), mode,
                                          known_x, known_mode, known_ret);

#if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP)
          /* If this is a typical RISC machine, we only have to worry
             about the way loads are extended.  */
          if ((LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
               ? (((nonzero
                    & (((unsigned HOST_WIDE_INT) 1
                        << (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - 1))))
                   != 0))
               : LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) != ZERO_EXTEND)
              || !MEM_P (SUBREG_REG (x)))
#endif
            {
              /* On many CISC machines, accessing an object in a wider mode
                 causes the high-order bits to become undefined.  So they are
                 not known to be zero.  */
              if (GET_MODE_SIZE (GET_MODE (x))
                  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
                nonzero |= (GET_MODE_MASK (GET_MODE (x))
                            & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
            }
        }
      break;

    case ASHIFTRT:
    case LSHIFTRT:
    case ASHIFT:
    case ROTATE:
      /* The nonzero bits are in two classes: any bits within MODE
         that aren't in GET_MODE (x) are always significant.  The rest of the
         nonzero bits are those that are significant in the operand of
         the shift when shifted the appropriate number of bits.  This
         shows that high-order bits are cleared by the right shift and
         low-order bits by left shifts.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) >= 0
          && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
        {
          enum machine_mode inner_mode = GET_MODE (x);
          unsigned int width = GET_MODE_BITSIZE (inner_mode);
          int count = INTVAL (XEXP (x, 1));
          unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
          unsigned HOST_WIDE_INT op_nonzero =
            cached_nonzero_bits (XEXP (x, 0), mode,
                                 known_x, known_mode, known_ret);
          unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
          unsigned HOST_WIDE_INT outer = 0;

          if (mode_width > width)
            outer = (op_nonzero & nonzero & ~mode_mask);

          if (code == LSHIFTRT)
            inner >>= count;
          else if (code == ASHIFTRT)
            {
              inner >>= count;

              /* If the sign bit may have been nonzero before the shift, we
                 need to mark all the places it could have been copied to
                 by the shift as possibly nonzero.  */
              if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
                inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
            }
          else if (code == ASHIFT)
            inner <<= count;
          else
            inner = ((inner << (count % width)
                      | (inner >> (width - (count % width)))) & mode_mask);

          nonzero &= (outer | inner);
        }
      break;

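      /* Worked example (illustrative, not part of the original source):
         for (lshiftrt:SI X (const_int 8)) with op_nonzero == 0xffffffff,
         the mask becomes 0xffffffff >> 8 == 0x00ffffff, proving the top
         eight bits of the result are zero.  */
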
    case FFS:
    case POPCOUNT:
      /* This is at most the number of bits in the mode.  */
      nonzero = ((HOST_WIDE_INT) 2 << (floor_log2 (mode_width))) - 1;
      break;

    case CLZ:
      /* If CLZ has a known value at zero, then the nonzero bits are
         that value, plus the number of bits in the mode minus one.  */
      if (CLZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
        nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
      else
        nonzero = -1;
      break;

    case CTZ:
      /* If CTZ has a known value at zero, then the nonzero bits are
         that value, plus the number of bits in the mode minus one.  */
      if (CTZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
        nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
      else
        nonzero = -1;
      break;

    case PARITY:
      nonzero = 1;
      break;

    case IF_THEN_ELSE:
      {
        unsigned HOST_WIDE_INT nonzero_true =
          cached_nonzero_bits (XEXP (x, 1), mode,
                               known_x, known_mode, known_ret);

        /* Don't call nonzero_bits for the second time if it cannot change
           anything.  */
        if ((nonzero & nonzero_true) != nonzero)
          nonzero &= nonzero_true
                     | cached_nonzero_bits (XEXP (x, 2), mode,
                                            known_x, known_mode, known_ret);
      }
      break;

    default:
      break;
    }

  return nonzero;
}

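/* Usage sketch (illustrative, not part of the original source): a caller
   can use this analysis to prove a zero-extension redundant.  For
   (and:SI R (const_int 255)) the AND case above intersects the operand
   masks, so the test

     if ((nonzero_bits (x, SImode) & ~(unsigned HOST_WIDE_INT) 0xff) == 0)

   succeeds and shows that X already fits in the low byte.  */
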
/* See the macro definition above.  */
#undef cached_num_sign_bit_copies


/* The function cached_num_sign_bit_copies is a wrapper around
   num_sign_bit_copies1.  It avoids exponential behavior in
   num_sign_bit_copies1 when X has identical subexpressions on the
   first or the second level.  */

static unsigned int
cached_num_sign_bit_copies (rtx x, enum machine_mode mode, rtx known_x,
                            enum machine_mode known_mode,
                            unsigned int known_ret)
{
  if (x == known_x && mode == known_mode)
    return known_ret;

  /* Try to find identical subexpressions.  If found call
     num_sign_bit_copies1 on X with the subexpressions as KNOWN_X and
     the precomputed value for the subexpression as KNOWN_RET.  */

  if (ARITHMETIC_P (x))
    {
      rtx x0 = XEXP (x, 0);
      rtx x1 = XEXP (x, 1);

      /* Check the first level.  */
      if (x0 == x1)
        return
          num_sign_bit_copies1 (x, mode, x0, mode,
                                cached_num_sign_bit_copies (x0, mode, known_x,
                                                            known_mode,
                                                            known_ret));

      /* Check the second level.  */
      if (ARITHMETIC_P (x0)
          && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
        return
          num_sign_bit_copies1 (x, mode, x1, mode,
                                cached_num_sign_bit_copies (x1, mode, known_x,
                                                            known_mode,
                                                            known_ret));

      if (ARITHMETIC_P (x1)
          && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
        return
          num_sign_bit_copies1 (x, mode, x0, mode,
                                cached_num_sign_bit_copies (x0, mode, known_x,
                                                            known_mode,
                                                            known_ret));
    }

  return num_sign_bit_copies1 (x, mode, known_x, known_mode, known_ret);
}

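/* Illustrative example (not part of the original source): for, e.g.,
   X == (plus:SI R R), the first-level check sees x0 == x1, computes the
   sign-bit copies of R once, and hands the result to
   num_sign_bit_copies1 as KNOWN_X/KNOWN_RET so the recursive walk does
   not evaluate R twice.  The same trick at the second level bounds the
   otherwise exponential blowup on deeply shared trees.  */
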
/* Return the number of bits at the high-order end of X that are known to
   be equal to the sign bit.  X will be used in mode MODE; if MODE is
   VOIDmode, X will be used in its own mode.  The returned value will always
   be between 1 and the number of bits in MODE.  */

static unsigned int
num_sign_bit_copies1 (rtx x, enum machine_mode mode, rtx known_x,
                      enum machine_mode known_mode,
                      unsigned int known_ret)
{
  enum rtx_code code = GET_CODE (x);
  unsigned int bitwidth = GET_MODE_BITSIZE (mode);
  int num0, num1, result;
  unsigned HOST_WIDE_INT nonzero;

  /* If we weren't given a mode, use the mode of X.  If the mode is still
     VOIDmode, we don't know anything.  Likewise if one of the modes is
     floating-point.  */

  if (mode == VOIDmode)
    mode = GET_MODE (x);

  if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x)))
    return 1;

  /* For a smaller object, just ignore the high bits.  */
  if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
    {
      num0 = cached_num_sign_bit_copies (x, GET_MODE (x),
                                         known_x, known_mode, known_ret);
      return MAX (1,
                  num0 - (int) (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth));
    }

  if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
    {
#ifndef WORD_REGISTER_OPERATIONS
  /* If this machine does not do all register operations on the entire
     register and MODE is wider than the mode of X, we can say nothing
     at all about the high-order bits.  */
      return 1;
#else
      /* Likewise on machines that do, if the mode of the object is smaller
         than a word and loads of that size don't sign extend, we can say
         nothing about the high order bits.  */
      if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
#ifdef LOAD_EXTEND_OP
          && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
#endif
          )
        return 1;
#endif
    }

  switch (code)
    {
    case REG:

#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
      /* If pointers extend signed and this is a pointer in Pmode, say that
         all the bits above ptr_mode are known to be sign bit copies.  */
      if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode
          && REG_POINTER (x))
        return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1;
#endif

      {
        unsigned int copies_for_hook = 1, copies = 1;
        rtx new = rtl_hooks.reg_num_sign_bit_copies (x, mode, known_x,
                                                     known_mode, known_ret,
                                                     &copies_for_hook);

        if (new)
          copies = cached_num_sign_bit_copies (new, mode, known_x,
                                               known_mode, known_ret);

        if (copies > 1 || copies_for_hook > 1)
          return MAX (copies, copies_for_hook);

        /* Else, use nonzero_bits to guess num_sign_bit_copies (see below).  */
      }
      break;

    case MEM:
#ifdef LOAD_EXTEND_OP
      /* Some RISC machines sign-extend all loads of smaller than a word.  */
      if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
        return MAX (1, ((int) bitwidth
                        - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1));
#endif
      break;

    case CONST_INT:
      /* If the constant is negative, take its 1's complement and remask.
         Then see how many zero bits we have.  */
      nonzero = INTVAL (x) & GET_MODE_MASK (mode);
      if (bitwidth <= HOST_BITS_PER_WIDE_INT
          && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
        nonzero = (~nonzero) & GET_MODE_MASK (mode);

      return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);

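      /* Worked example (illustrative, not part of the original source):
         for (const_int -4) in SImode, nonzero = 0xfffffffc; the sign bit
         is set, so we complement to 0x3, floor_log2 (0x3) == 1, and the
         result is 32 - 1 - 1 == 30: bits 31..2 are all copies of the
         sign bit.  */
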
    case SUBREG:
      /* If this is a SUBREG for a promoted object that is sign-extended
         and we are looking at it in a wider mode, we know that at least the
         high-order bits are known to be sign bit copies.  */

      if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
        {
          num0 = cached_num_sign_bit_copies (SUBREG_REG (x), mode,
                                             known_x, known_mode, known_ret);
          return MAX ((int) bitwidth
                      - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1,
                      num0);
        }

      /* For a smaller object, just ignore the high bits.  */
      if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
        {
          num0 = cached_num_sign_bit_copies (SUBREG_REG (x), VOIDmode,
                                             known_x, known_mode, known_ret);
          return MAX (1, (num0
                          - (int) (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
                                   - bitwidth)));
        }

#ifdef WORD_REGISTER_OPERATIONS
#ifdef LOAD_EXTEND_OP
      /* For paradoxical SUBREGs on machines where all register operations
         affect the entire register, just look inside.  Note that we are
         passing MODE to the recursive call, so the number of sign bit copies
         will remain relative to that mode, not the inner mode.  */

      /* This works only if loads sign extend.  Otherwise, if we get a
         reload for the inner part, it may be loaded from the stack, and
         then we lose all sign bit copies that existed before the store
         to the stack.  */

      if ((GET_MODE_SIZE (GET_MODE (x))
           > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
          && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
          && MEM_P (SUBREG_REG (x)))
        return cached_num_sign_bit_copies (SUBREG_REG (x), mode,
                                           known_x, known_mode, known_ret);
#endif
#endif
      break;

    case SIGN_EXTRACT:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
        return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1)));
      break;

    case SIGN_EXTEND:
      return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
              + cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
                                            known_x, known_mode, known_ret));

    case TRUNCATE:
      /* For a smaller object, just ignore the high bits.  */
      num0 = cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
                                         known_x, known_mode, known_ret);
      return MAX (1, (num0 - (int) (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
                                    - bitwidth)));

    case NOT:
      return cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                         known_x, known_mode, known_ret);

    case ROTATE:       case ROTATERT:
      /* If we are rotating left by a number of bits less than the number
         of sign bit copies, we can just subtract that amount from the
         number.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) >= 0
          && INTVAL (XEXP (x, 1)) < (int) bitwidth)
        {
          num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                             known_x, known_mode, known_ret);
          return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
                                 : (int) bitwidth - INTVAL (XEXP (x, 1))));
        }
      break;

    case NEG:
      /* In general, this subtracts one sign bit copy.  But if the value
         is known to be positive, the number of sign bit copies is the
         same as that of the input.  Finally, if the input has just one bit
         that might be nonzero, all the bits are copies of the sign bit.  */
      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                         known_x, known_mode, known_ret);
      if (bitwidth > HOST_BITS_PER_WIDE_INT)
        return num0 > 1 ? num0 - 1 : 1;

      nonzero = nonzero_bits (XEXP (x, 0), mode);
      if (nonzero == 1)
        return bitwidth;

      if (num0 > 1
          && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
        num0--;

      return num0;

    case IOR:   case AND:   case XOR:
    case SMIN:  case SMAX:  case UMIN:  case UMAX:
      /* Logical operations will preserve the number of sign-bit copies.
         MIN and MAX operations always return one of the operands.  */
      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                         known_x, known_mode, known_ret);
      num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
                                         known_x, known_mode, known_ret);
      return MIN (num0, num1);

    case PLUS:  case MINUS:
      /* For addition and subtraction, we can have a 1-bit carry.  However,
         if we are subtracting 1 from a positive number, there will not
         be such a carry.  Furthermore, if the positive number is known to
         be 0 or 1, we know the result is either -1 or 0.  */

      if (code == PLUS && XEXP (x, 1) == constm1_rtx
          && bitwidth <= HOST_BITS_PER_WIDE_INT)
        {
          nonzero = nonzero_bits (XEXP (x, 0), mode);
          if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
            return (nonzero == 1 || nonzero == 0 ? bitwidth
                    : bitwidth - floor_log2 (nonzero) - 1);
        }

      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                         known_x, known_mode, known_ret);
      num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
                                         known_x, known_mode, known_ret);
      result = MAX (1, MIN (num0, num1) - 1);

#ifdef POINTERS_EXTEND_UNSIGNED
      /* If pointers extend signed and this is an addition or subtraction
         to a pointer in Pmode, all the bits above ptr_mode are known to be
         sign bit copies.  */
      if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
          && (code == PLUS || code == MINUS)
          && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
        result = MAX ((int) (GET_MODE_BITSIZE (Pmode)
                             - GET_MODE_BITSIZE (ptr_mode) + 1),
                      result);
#endif
      return result;

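      /* Worked example (illustrative, not part of the original source):
         for (plus:SI X (const_int -1)) where nonzero_bits (X) == 1, X is
         known to be 0 or 1, so X - 1 is 0 or -1 and every bit is a copy
         of the sign bit: the special case above returns the full
         bitwidth of 32.  */
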
    case MULT:
      /* The number of bits of the product is the sum of the number of
         bits of both terms.  However, unless one of the terms is known
         to be positive, we must allow for an additional bit since negating
         a negative number can remove one sign bit copy.  */

      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                         known_x, known_mode, known_ret);
      num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
                                         known_x, known_mode, known_ret);

      result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
      if (result > 0
          && (bitwidth > HOST_BITS_PER_WIDE_INT
              || (((nonzero_bits (XEXP (x, 0), mode)
                    & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
                  && ((nonzero_bits (XEXP (x, 1), mode)
                       & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))))
        result--;

      return MAX (1, result);

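      /* Worked example (illustrative, not part of the original source):
         in SImode with num0 == 20 and num1 == 24, the operands fit in 13
         and 9 significant bits, so result = 32 - 12 - 8 = 12; if both
         operands might be negative (their sign bits appear in
         nonzero_bits), one more bit is reserved and 11 sign-bit copies
         are reported.  */
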
    case UDIV:
      /* The result must be <= the first operand.  If the first operand
         has the high bit set, we know nothing about the number of sign
         bit copies.  */
      if (bitwidth > HOST_BITS_PER_WIDE_INT)
        return 1;
      else if ((nonzero_bits (XEXP (x, 0), mode)
                & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
        return 1;
      else
        return cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                           known_x, known_mode, known_ret);

    case UMOD:
      /* The result must be <= the second operand.  */
      return cached_num_sign_bit_copies (XEXP (x, 1), mode,
                                           known_x, known_mode, known_ret);

    case DIV:
      /* Similar to unsigned division, except that we have to worry about
         the case where the divisor is negative, in which case we have
         to add 1.  */
      result = cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                           known_x, known_mode, known_ret);
      if (result > 1
          && (bitwidth > HOST_BITS_PER_WIDE_INT
              || (nonzero_bits (XEXP (x, 1), mode)
                  & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
        result--;

      return result;

    case MOD:
      result = cached_num_sign_bit_copies (XEXP (x, 1), mode,
                                           known_x, known_mode, known_ret);
      if (result > 1
          && (bitwidth > HOST_BITS_PER_WIDE_INT
              || (nonzero_bits (XEXP (x, 1), mode)
                  & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
        result--;

      return result;

    case ASHIFTRT:
      /* Shifts by a constant add to the number of bits equal to the
         sign bit.  */
      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                         known_x, known_mode, known_ret);
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) > 0)
        num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1)));

      return num0;

    case ASHIFT:
      /* Left shifts destroy copies.  */
      if (GET_CODE (XEXP (x, 1)) != CONST_INT
          || INTVAL (XEXP (x, 1)) < 0
          || INTVAL (XEXP (x, 1)) >= (int) bitwidth)
        return 1;

      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                         known_x, known_mode, known_ret);
      return MAX (1, num0 - INTVAL (XEXP (x, 1)));

    case IF_THEN_ELSE:
      num0 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
                                         known_x, known_mode, known_ret);
      num1 = cached_num_sign_bit_copies (XEXP (x, 2), mode,
                                         known_x, known_mode, known_ret);
      return MIN (num0, num1);

    case EQ:  case NE:  case GE:  case GT:  case LE:  case LT:
    case UNEQ:  case LTGT:  case UNGE:  case UNGT:  case UNLE:  case UNLT:
    case GEU: case GTU: case LEU: case LTU:
    case UNORDERED: case ORDERED:
      /* If the constant is negative, take its 1's complement and remask.
         Then see how many zero bits we have.  */
      nonzero = STORE_FLAG_VALUE;
      if (bitwidth <= HOST_BITS_PER_WIDE_INT
          && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
        nonzero = (~nonzero) & GET_MODE_MASK (mode);

      return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);

    default:
      break;
    }

  /* If we haven't been able to figure it out by one of the above rules,
     see if some of the high-order bits are known to be zero.  If so,
     count those bits and return one less than that amount.  If we can't
     safely compute the mask for this mode, always return BITWIDTH.  */

  bitwidth = GET_MODE_BITSIZE (mode);
  if (bitwidth > HOST_BITS_PER_WIDE_INT)
    return 1;

  nonzero = nonzero_bits (x, mode);
  return nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
         ? 1 : bitwidth - floor_log2 (nonzero) - 1;
}

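/* Worked example (illustrative, not part of the original source): when
   the final fallback applies and nonzero_bits (x, SImode) == 0xff, the
   sign bit is clear and floor_log2 (0xff) == 7, so 32 - 7 - 1 == 24 high
   bits are known to be copies of the (zero) sign bit.  */
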
/* Calculate the rtx_cost of a single instruction.  A return value of
   zero indicates an instruction pattern without a known cost.  */

int
insn_rtx_cost (rtx pat)
{
  int i, cost;
  rtx set;

  /* Extract the single set rtx from the instruction pattern.
     We can't use single_set since we only have the pattern.  */
  if (GET_CODE (pat) == SET)
    set = pat;
  else if (GET_CODE (pat) == PARALLEL)
    {
      set = NULL_RTX;
      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx x = XVECEXP (pat, 0, i);
          if (GET_CODE (x) == SET)
            {
              if (set)
                return 0;
              set = x;
            }
        }
      if (!set)
        return 0;
    }
  else
    return 0;

  cost = rtx_cost (SET_SRC (set), SET);
  return cost > 0 ? cost : COSTS_N_INSNS (1);
}

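/* Usage sketch (illustrative, not part of the original source): a
   transformation can bound the work it is willing to speculate on,
   where MAX_INSNS is a hypothetical caller-chosen budget:

     int cost = insn_rtx_cost (PATTERN (insn));
     if (cost == 0 || cost > COSTS_N_INSNS (max_insns))
       return FALSE;

   A zero return means the pattern had no recognizable single SET.  */
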
/* Given an insn INSN and condition COND, return the condition in a
   canonical form to simplify testing by callers.  Specifically:

   (1) The code will always be a comparison operation (EQ, NE, GT, etc.).
   (2) Both operands will be machine operands; (cc0) will have been replaced.
   (3) If an operand is a constant, it will be the second operand.
   (4) (LE x const) will be replaced with (LT x <const+1>) and similarly
       for GE, GEU, and LEU.

   If the condition cannot be understood, or is an inequality floating-point
   comparison which needs to be reversed, 0 will be returned.

   If REVERSE is nonzero, then reverse the condition prior to canonicalizing
   it.

   If EARLIEST is nonzero, it is a pointer to a place where the earliest
   insn used in locating the condition was found.  If a replacement test
   of the condition is desired, it should be placed in front of that
   insn and we will be sure that the inputs are still valid.

   If WANT_REG is nonzero, we wish the condition to be relative to that
   register, if possible.  Therefore, do not canonicalize the condition
   further.  If ALLOW_CC_MODE is nonzero, allow the condition returned
   to be a compare to a CC mode register.

   If VALID_AT_INSN_P, the condition must be valid at both *EARLIEST
   and at INSN.  */

rtx
canonicalize_condition (rtx insn, rtx cond, int reverse, rtx *earliest,
                        rtx want_reg, int allow_cc_mode, int valid_at_insn_p)
{
  enum rtx_code code;
  rtx prev = insn;
  rtx set;
  rtx tem;
  rtx op0, op1;
  int reverse_code = 0;
  enum machine_mode mode;
  basic_block bb = BLOCK_FOR_INSN (insn);

  code = GET_CODE (cond);
  mode = GET_MODE (cond);
  op0 = XEXP (cond, 0);
  op1 = XEXP (cond, 1);

  if (reverse)
    code = reversed_comparison_code (cond, insn);
  if (code == UNKNOWN)
    return 0;

  if (earliest)
    *earliest = insn;

  /* If we are comparing a register with zero, see if the register is set
     in the previous insn to a COMPARE or a comparison operation.  Perform
     the same tests as a function of STORE_FLAG_VALUE as find_comparison_args
     in cse.c  */

  while ((GET_RTX_CLASS (code) == RTX_COMPARE
          || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
         && op1 == CONST0_RTX (GET_MODE (op0))
         && op0 != want_reg)
    {
      /* Set nonzero when we find something of interest.  */
      rtx x = 0;

#ifdef HAVE_cc0
      /* If comparison with cc0, import actual comparison from compare
         insn.  */
      if (op0 == cc0_rtx)
        {
          if ((prev = prev_nonnote_insn (prev)) == 0
              || !NONJUMP_INSN_P (prev)
              || (set = single_set (prev)) == 0
              || SET_DEST (set) != cc0_rtx)
            return 0;

          op0 = SET_SRC (set);
          op1 = CONST0_RTX (GET_MODE (op0));
          if (earliest)
            *earliest = prev;
        }
#endif

      /* If this is a COMPARE, pick up the two things being compared.  */
      if (GET_CODE (op0) == COMPARE)
        {
          op1 = XEXP (op0, 1);
          op0 = XEXP (op0, 0);
          continue;
        }
      else if (!REG_P (op0))
        break;

      /* Go back to the previous insn.  Stop if it is not an INSN.  We also
         stop if it isn't a single set or if it has a REG_INC note because
         we don't want to bother dealing with it.  */

      if ((prev = prev_nonnote_insn (prev)) == 0
          || !NONJUMP_INSN_P (prev)
          || FIND_REG_INC_NOTE (prev, NULL_RTX)
          /* In cfglayout mode, there do not have to be labels at the
             beginning of a block, or jumps at the end, so the previous
             conditions would not stop us when we reach bb boundary.  */
          || BLOCK_FOR_INSN (prev) != bb)
        break;

      set = set_of (op0, prev);

      if (set
          && (GET_CODE (set) != SET
              || !rtx_equal_p (SET_DEST (set), op0)))
        break;

      /* If this is setting OP0, get what it sets it to if it looks
         relevant.  */
      if (set)
        {
          enum machine_mode inner_mode = GET_MODE (SET_DEST (set));
#ifdef FLOAT_STORE_FLAG_VALUE
          REAL_VALUE_TYPE fsfv;
#endif

          /* ??? We may not combine comparisons done in a CCmode with
             comparisons not done in a CCmode.  This is to aid targets
             like Alpha that have an IEEE compliant EQ instruction, and
             a non-IEEE compliant BEQ instruction.  The use of CCmode is
             actually artificial, simply to prevent the combination, but
             should not affect other platforms.

             However, we must allow VOIDmode comparisons to match either
             CCmode or non-CCmode comparison, because some ports have
             modeless comparisons inside branch patterns.

             ??? This mode check should perhaps look more like the mode check
             in simplify_comparison in combine.  */

          if ((GET_CODE (SET_SRC (set)) == COMPARE
               || (((code == NE
                     || (code == LT
                         && GET_MODE_CLASS (inner_mode) == MODE_INT
                         && (GET_MODE_BITSIZE (inner_mode)
                             <= HOST_BITS_PER_WIDE_INT)
                         && (STORE_FLAG_VALUE
                             & ((HOST_WIDE_INT) 1
                                << (GET_MODE_BITSIZE (inner_mode) - 1))))
#ifdef FLOAT_STORE_FLAG_VALUE
                     || (code == LT
                         && SCALAR_FLOAT_MODE_P (inner_mode)
                         && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
                             REAL_VALUE_NEGATIVE (fsfv)))
#endif
                     ))
                   && COMPARISON_P (SET_SRC (set))))
              && (((GET_MODE_CLASS (mode) == MODE_CC)
                   == (GET_MODE_CLASS (inner_mode) == MODE_CC))
                  || mode == VOIDmode || inner_mode == VOIDmode))
            x = SET_SRC (set);
          else if (((code == EQ
                     || (code == GE
                         && (GET_MODE_BITSIZE (inner_mode)
                             <= HOST_BITS_PER_WIDE_INT)
                         && GET_MODE_CLASS (inner_mode) == MODE_INT
                         && (STORE_FLAG_VALUE
                             & ((HOST_WIDE_INT) 1
                                << (GET_MODE_BITSIZE (inner_mode) - 1))))
#ifdef FLOAT_STORE_FLAG_VALUE
                     || (code == GE
                         && SCALAR_FLOAT_MODE_P (inner_mode)
                         && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
                             REAL_VALUE_NEGATIVE (fsfv)))
#endif
                     ))
                   && COMPARISON_P (SET_SRC (set))
                   && (((GET_MODE_CLASS (mode) == MODE_CC)
                        == (GET_MODE_CLASS (inner_mode) == MODE_CC))
                       || mode == VOIDmode || inner_mode == VOIDmode))

            {
              reverse_code = 1;
              x = SET_SRC (set);
            }
          else
            break;
        }

      else if (reg_set_p (op0, prev))
        /* If this sets OP0, but not directly, we have to give up.  */
        break;

      if (x)
        {
          /* If the caller is expecting the condition to be valid at INSN,
             make sure X doesn't change before INSN.  */
          if (valid_at_insn_p)
            if (modified_in_p (x, prev) || modified_between_p (x, prev, insn))
              break;
          if (COMPARISON_P (x))
            code = GET_CODE (x);
          if (reverse_code)
            {
              code = reversed_comparison_code (x, prev);
              if (code == UNKNOWN)
                return 0;
              reverse_code = 0;
            }

          op0 = XEXP (x, 0), op1 = XEXP (x, 1);
          if (earliest)
            *earliest = prev;
        }
    }

  /* If constant is first, put it last.  */
  if (CONSTANT_P (op0))
    code = swap_condition (code), tem = op0, op0 = op1, op1 = tem;

  /* If OP0 is the result of a comparison, we weren't able to find what
     was really being compared, so fail.  */
  if (!allow_cc_mode
      && GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC)
    return 0;

  /* Canonicalize any ordered comparison with integers involving equality
     if we can do computations in the relevant mode and we do not
     overflow.  */

  if (GET_MODE_CLASS (GET_MODE (op0)) != MODE_CC
      && GET_CODE (op1) == CONST_INT
      && GET_MODE (op0) != VOIDmode
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      HOST_WIDE_INT const_val = INTVAL (op1);
      unsigned HOST_WIDE_INT uconst_val = const_val;
      unsigned HOST_WIDE_INT max_val
        = (unsigned HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (op0));

      switch (code)
        {
        case LE:
          if ((unsigned HOST_WIDE_INT) const_val != max_val >> 1)
            code = LT, op1 = gen_int_mode (const_val + 1, GET_MODE (op0));
          break;

        /* When cross-compiling, const_val might be sign-extended from
           BITS_PER_WORD to HOST_BITS_PER_WIDE_INT */
        case GE:
          if ((HOST_WIDE_INT) (const_val & max_val)
              != (((HOST_WIDE_INT) 1
                   << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
            code = GT, op1 = gen_int_mode (const_val - 1, GET_MODE (op0));
          break;

        case LEU:
          if (uconst_val < max_val)
            code = LTU, op1 = gen_int_mode (uconst_val + 1, GET_MODE (op0));
          break;

        case GEU:
          if (uconst_val != 0)
            code = GTU, op1 = gen_int_mode (uconst_val - 1, GET_MODE (op0));
          break;

        default:
          break;
        }
    }

  /* Never return CC0; return zero instead.  */
  if (CC0_P (op0))
    return 0;

  return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
}

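/* Worked example (illustrative, not part of the original source): given
   COND == (le:SI (reg:SI 60) (const_int 5)) with REVERSE == 0, rule (4)
   fires in the LE arm above and the returned rtx is
   (lt (reg:SI 60) (const_int 6)), so callers only ever need to test the
   strict forms LT, GT, LTU and GTU against constants.  */
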
/* Given a jump insn JUMP, return the condition that will cause it to branch
   to its JUMP_LABEL.  If the condition cannot be understood, or is an
   inequality floating-point comparison which needs to be reversed, 0 will
   be returned.

   If EARLIEST is nonzero, it is a pointer to a place where the earliest
   insn used in locating the condition was found.  If a replacement test
   of the condition is desired, it should be placed in front of that
   insn and we will be sure that the inputs are still valid.  If EARLIEST
   is null, the returned condition will be valid at INSN.

   If ALLOW_CC_MODE is nonzero, allow the condition returned to be a
   compare CC mode register.

   VALID_AT_INSN_P is the same as for canonicalize_condition.  */

rtx
get_condition (rtx jump, rtx *earliest, int allow_cc_mode, int valid_at_insn_p)
{
  rtx cond;
  int reverse;
  rtx set;

  /* If this is not a standard conditional jump, we can't parse it.  */
  if (!JUMP_P (jump)
      || ! any_condjump_p (jump))
    return 0;
  set = pc_set (jump);

  cond = XEXP (SET_SRC (set), 0);

  /* If this branches to JUMP_LABEL when the condition is false, reverse
     the condition.  */
  reverse
    = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
      && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump);

  return canonicalize_condition (jump, cond, reverse, earliest, NULL_RTX,
                                 allow_cc_mode, valid_at_insn_p);
}

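/* Usage sketch (illustrative, not part of the original source):

     rtx cond = get_condition (jump, NULL, 0, 1);

   might return, e.g., (lt (reg:SI 60) (const_int 10)) for a conditional
   jump taken when that comparison holds; with a null EARLIEST the
   returned condition is guaranteed valid at JUMP itself.  */
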
/* Initialize the table NUM_SIGN_BIT_COPIES_IN_REP based on
   TARGET_MODE_REP_EXTENDED.

   Note that we assume that the property of
   TARGET_MODE_REP_EXTENDED(B, C) is sticky to the integral modes
   narrower than mode B.  I.e., if A is a mode narrower than B then in
   order to be able to operate on it in mode B, mode A needs to
   satisfy the requirements set by the representation of mode B.  */

static void
init_num_sign_bit_copies_in_rep (void)
{
  enum machine_mode mode, in_mode;

  for (in_mode = GET_CLASS_NARROWEST_MODE (MODE_INT); in_mode != VOIDmode;
       in_mode = GET_MODE_WIDER_MODE (mode))
    for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != in_mode;
         mode = GET_MODE_WIDER_MODE (mode))
      {
        enum machine_mode i;

        /* Currently, it is assumed that TARGET_MODE_REP_EXTENDED
           extends to the next widest mode.  */
        gcc_assert (targetm.mode_rep_extended (mode, in_mode) == UNKNOWN
                    || GET_MODE_WIDER_MODE (mode) == in_mode);

        /* We are in in_mode.  Count how many bits outside of mode
           have to be copies of the sign-bit.  */
        for (i = mode; i != in_mode; i = GET_MODE_WIDER_MODE (i))
          {
            enum machine_mode wider = GET_MODE_WIDER_MODE (i);

            if (targetm.mode_rep_extended (i, wider) == SIGN_EXTEND
                /* We can only check sign-bit copies starting from the
                   top-bit.  In order to be able to check the bits we
                   have already seen we pretend that subsequent bits
                   have to be sign-bit copies too.  */
                || num_sign_bit_copies_in_rep [in_mode][mode])
              num_sign_bit_copies_in_rep [in_mode][mode]
                += GET_MODE_BITSIZE (wider) - GET_MODE_BITSIZE (i);
          }
      }
}

/* Suppose that truncation from the machine mode of X to MODE is not a
   no-op.  See if there is anything special about X so that we can
   assume it already contains a truncated value of MODE.  */

bool
truncated_to_mode (enum machine_mode mode, rtx x)
{
  /* This register has already been used in MODE without explicit
     truncation.  */
  if (REG_P (x) && rtl_hooks.reg_truncated_to_mode (mode, x))
    return true;

  /* See if we already satisfy the requirements of MODE.  If yes we
     can just switch to MODE.  */
  if (num_sign_bit_copies_in_rep[GET_MODE (x)][mode]
      && (num_sign_bit_copies (x, GET_MODE (x))
          >= num_sign_bit_copies_in_rep[GET_MODE (x)][mode] + 1))
    return true;

  return false;
}

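/* Worked example (illustrative, not part of the original source): on a
   hypothetical target where targetm.mode_rep_extended (SImode, DImode)
   returns SIGN_EXTEND, the table records
   num_sign_bit_copies_in_rep[DImode][SImode] == 32, so
   truncated_to_mode (SImode, x) accepts a DImode X only when
   num_sign_bit_copies shows at least 33 sign-bit copies, i.e. the value
   is already sign-extended from SImode.  */
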
/* Initialize non_rtx_starting_operands, which is used to speed up
   for_each_rtx.  */
void
init_rtlanal (void)
{
  int i;
  for (i = 0; i < NUM_RTX_CODE; i++)
    {
      const char *format = GET_RTX_FORMAT (i);
      const char *first = strpbrk (format, "eEV");
      non_rtx_starting_operands[i] = first ? first - format : -1;
    }

  init_num_sign_bit_copies_in_rep ();
}

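/* Illustrative example (not part of the original source): PLUS has
   format "ee", so strpbrk finds an 'e' at offset 0 and
   non_rtx_starting_operands[PLUS] == 0, while CONST_INT has format "w"
   (no rtx operands at all) and gets -1.  for_each_rtx uses these
   offsets to skip the leading non-rtx operands of each code.  */
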
/* Check whether this is a constant pool constant.  */
bool
constant_pool_constant_p (rtx x)
{
  x = avoid_constant_pool_reference (x);
  return GET_CODE (x) == CONST_DOUBLE;
}