/* Analyze RTL for C-Compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "target.h"
#include "output.h"
#include "tm_p.h"
#include "flags.h"
#include "real.h"
#include "regs.h"
#include "function.h"

/* Forward declarations */
static int global_reg_mentioned_p_1 (rtx *, void *);
static void set_of_1 (rtx, rtx, void *);
static bool covers_regno_p (rtx, unsigned int);
static bool covers_regno_no_parallel_p (rtx, unsigned int);
static int rtx_referenced_p_1 (rtx *, void *);
static int computed_jump_p_1 (rtx);
static void parms_set (rtx, rtx, void *);

static unsigned HOST_WIDE_INT cached_nonzero_bits (rtx, enum machine_mode,
                                                   rtx, enum machine_mode,
                                                   unsigned HOST_WIDE_INT);
static unsigned HOST_WIDE_INT nonzero_bits1 (rtx, enum machine_mode, rtx,
                                             enum machine_mode,
                                             unsigned HOST_WIDE_INT);
static unsigned int cached_num_sign_bit_copies (rtx, enum machine_mode, rtx,
                                                enum machine_mode,
                                                unsigned int);
static unsigned int num_sign_bit_copies1 (rtx, enum machine_mode, rtx,
                                          enum machine_mode, unsigned int);

/* Offset of the first 'e', 'E' or 'V' operand for each rtx code, or
   -1 if a code has no such operand.  */
static int non_rtx_starting_operands[NUM_RTX_CODE];

/* Bit flags that specify the machine subtype we are compiling for.
   Bits are tested using macros TARGET_... defined in the tm.h file
   and set by `-m...' switches.  Must be defined in rtlanal.c.  */

int target_flags;

/* Return 1 if the value of X is unstable
   (would be different at a different point in the program).
   The frame pointer, arg pointer, etc. are considered stable
   (within one function) and so is anything marked `unchanging'.  */

int
rtx_unstable_p (rtx x)
{
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  switch (code)
    {
    case MEM:
      return !MEM_READONLY_P (x) || rtx_unstable_p (XEXP (x, 0));

    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case REG:
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return 0;
#ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
      /* ??? When call-clobbered, the value is stable modulo the restore
         that must happen after a call.  This currently screws up local-alloc
         into believing that the restore is not needed.  */
      if (x == pic_offset_table_rtx)
        return 0;
#endif
      return 1;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

      /* Fall through.  */

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_unstable_p (XEXP (x, i)))
          return 1;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_unstable_p (XVECEXP (x, i, j)))
            return 1;
      }

  return 0;
}
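
/* Editorial example, not part of the original source: a minimal sketch of
   the classification above, assuming the usual rtl.h constructors.  A load
   through a pseudo register is unstable, while the frame pointer itself is
   stable within one function:

     rtx reg = gen_rtx_REG (SImode, FIRST_PSEUDO_REGISTER);
     rtx mem = gen_rtx_MEM (SImode, reg);
     rtx_unstable_p (mem);                  returns 1 (writable MEM)
     rtx_unstable_p (frame_pointer_rtx);    returns 0 (stable)
*/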

/* Return 1 if X has a value that can vary even between two
   executions of the program.  0 means X can be compared reliably
   against certain constants or near-constants.
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
   zero, we are slightly more conservative.
   The frame pointer and the arg pointer are considered constant.  */

int
rtx_varies_p (rtx x, int for_alias)
{
  RTX_CODE code;
  int i;
  const char *fmt;

  if (!x)
    return 0;

  code = GET_CODE (x);
  switch (code)
    {
    case MEM:
      return !MEM_READONLY_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);

    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case REG:
      /* Note that we have to test for the actual rtx used for the frame
         and arg pointers and not just the register number in case we have
         eliminated the frame and/or arg pointer and are using it
         for pseudos.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return 0;
      if (x == pic_offset_table_rtx
#ifdef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
          /* ??? When call-clobbered, the value is stable modulo the restore
             that must happen after a call.  This currently screws up
             local-alloc into believing that the restore is not needed, so we
             must return 0 only if we are called from alias analysis.  */
          && for_alias
#endif
          )
        return 0;
      return 1;

    case LO_SUM:
      /* The operand 0 of a LO_SUM is considered constant
         (in fact it is related specifically to operand 1)
         during alias analysis.  */
      return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
             || rtx_varies_p (XEXP (x, 1), for_alias);

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

      /* Fall through.  */

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_varies_p (XEXP (x, i), for_alias))
          return 1;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
            return 1;
      }

  return 0;
}

/* Return nonzero if the use of X as an address in a MEM can cause a trap.
   MODE is the mode of the MEM (not that of X) and UNALIGNED_MEMS controls
   whether nonzero is returned for unaligned memory accesses on strict
   alignment machines.  */

static int
rtx_addr_can_trap_p_1 (rtx x, enum machine_mode mode, bool unaligned_mems)
{
  enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case SYMBOL_REF:
      return SYMBOL_REF_WEAK (x);

    case LABEL_REF:
      return 0;

    case REG:
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          || x == stack_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return 0;
      /* All of the virtual frame registers are stack references.  */
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
          && REGNO (x) <= LAST_VIRTUAL_REGISTER)
        return 0;
      return 1;

    case CONST:
      return rtx_addr_can_trap_p_1 (XEXP (x, 0), mode, unaligned_mems);

    case PLUS:
      /* An address is assumed not to trap if:
         - it is an address that can't trap plus a constant integer,
           with the proper remainder modulo the mode size if we are
           considering unaligned memory references.  */
      if (!rtx_addr_can_trap_p_1 (XEXP (x, 0), mode, unaligned_mems)
          && GET_CODE (XEXP (x, 1)) == CONST_INT)
        {
          HOST_WIDE_INT offset;

          if (!STRICT_ALIGNMENT
              || !unaligned_mems
              || GET_MODE_SIZE (mode) == 0)
            return 0;

          offset = INTVAL (XEXP (x, 1));

#ifdef SPARC_STACK_BOUNDARY_HACK
          /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
             the real alignment of %sp.  However, when it does this, the
             alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY.  */
          if (SPARC_STACK_BOUNDARY_HACK
              && (XEXP (x, 0) == stack_pointer_rtx
                  || XEXP (x, 0) == hard_frame_pointer_rtx))
            offset -= STACK_POINTER_OFFSET;
#endif

          return offset % GET_MODE_SIZE (mode) != 0;
        }

      /* - or it is the pic register plus a constant.  */
      if (XEXP (x, 0) == pic_offset_table_rtx && CONSTANT_P (XEXP (x, 1)))
        return 0;

      return 1;

    case LO_SUM:
    case PRE_MODIFY:
      return rtx_addr_can_trap_p_1 (XEXP (x, 1), mode, unaligned_mems);

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      return rtx_addr_can_trap_p_1 (XEXP (x, 0), mode, unaligned_mems);

    default:
      break;
    }

  /* If it isn't one of the cases above, it can cause a trap.  */
  return 1;
}

/* Return nonzero if the use of X as an address in a MEM can cause a trap.  */

int
rtx_addr_can_trap_p (rtx x)
{
  return rtx_addr_can_trap_p_1 (x, VOIDmode, false);
}
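
/* Editorial example, not in the original source: for a stack-relative
   address the answer is "cannot trap", while a weak symbol (which may
   resolve to address 0) can trap:

     rtx a = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (4));
     rtx_addr_can_trap_p (a);   returns 0, a stack reference plus constant

   For a SYMBOL_REF with SYMBOL_REF_WEAK set, the result is 1.  */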

/* Return true if X is an address that is known to not be zero.  */

bool
nonzero_address_p (rtx x)
{
  enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case SYMBOL_REF:
      return !SYMBOL_REF_WEAK (x);

    case LABEL_REF:
      return true;

    case REG:
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          || x == stack_pointer_rtx
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return true;
      /* All of the virtual frame registers are stack references.  */
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
          && REGNO (x) <= LAST_VIRTUAL_REGISTER)
        return true;
      return false;

    case CONST:
      return nonzero_address_p (XEXP (x, 0));

    case PLUS:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
        {
          /* Pointers aren't allowed to wrap.  If we've got a register
             that is known to be a pointer, and a positive offset, then
             the composite can't be zero.  */
          if (INTVAL (XEXP (x, 1)) > 0
              && REG_P (XEXP (x, 0))
              && REG_POINTER (XEXP (x, 0)))
            return true;

          return nonzero_address_p (XEXP (x, 0));
        }
      /* Handle PIC references.  */
      else if (XEXP (x, 0) == pic_offset_table_rtx
               && CONSTANT_P (XEXP (x, 1)))
        return true;
      return false;

    case PRE_MODIFY:
      /* Similar to the above; allow positive offsets.  Further, since
         auto-inc is only allowed in memories, the register must be a
         pointer.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) > 0)
        return true;
      return nonzero_address_p (XEXP (x, 0));

    case PRE_INC:
      /* Similarly.  Further, the offset is always positive.  */
      return true;

    case PRE_DEC:
    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      return nonzero_address_p (XEXP (x, 0));

    case LO_SUM:
      return nonzero_address_p (XEXP (x, 1));

    default:
      break;
    }

  /* If it isn't one of the cases above, it might be zero.  */
  return false;
}
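
/* Editorial example, not in the original source: since pointers may not
   wrap, a REG_POINTER register plus a positive constant is known nonzero:

     rtx p = gen_rtx_REG (Pmode, FIRST_PSEUDO_REGISTER);
     REG_POINTER (p) = 1;
     nonzero_address_p (gen_rtx_PLUS (Pmode, p, GEN_INT (8)));   true
*/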

/* Return 1 if X refers to a memory location whose address
   cannot be compared reliably with constant addresses,
   or if X refers to a BLKmode memory object.
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
   zero, we are slightly more conservative.  */

int
rtx_addr_varies_p (rtx x, int for_alias)
{
  enum rtx_code code;
  int i;
  const char *fmt;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == MEM)
    return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_addr_varies_p (XEXP (x, i), for_alias))
          return 1;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
            return 1;
      }
  return 0;
}

/* Return the value of the integer term in X, if one is apparent;
   otherwise return 0.
   Only obvious integer terms are detected.
   This is used in cse.c with the `related_value' field.  */

HOST_WIDE_INT
get_integer_term (rtx x)
{
  if (GET_CODE (x) == CONST)
    x = XEXP (x, 0);

  if (GET_CODE (x) == MINUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return - INTVAL (XEXP (x, 1));
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return INTVAL (XEXP (x, 1));
  return 0;
}

/* If X is a constant, return the value sans apparent integer term;
   otherwise return 0.
   Only obvious integer terms are detected.  */

rtx
get_related_value (rtx x)
{
  if (GET_CODE (x) != CONST)
    return 0;
  x = XEXP (x, 0);
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return XEXP (x, 0);
  else if (GET_CODE (x) == MINUS
           && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return XEXP (x, 0);
  return 0;
}
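
/* Editorial example, not in the original source: the two functions above
   split a CONST into base and offset.  For
   (const (plus (symbol_ref "x") (const_int 12))),
   get_integer_term returns 12 and get_related_value returns the
   (symbol_ref "x") operand, so cse.c can relate addresses that differ only
   by a constant.  */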

/* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
   a global register.  */

static int
global_reg_mentioned_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
{
  int regno;
  rtx x = *loc;

  if (! x)
    return 0;

  switch (GET_CODE (x))
    {
    case SUBREG:
      if (REG_P (SUBREG_REG (x)))
        {
          if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
              && global_regs[subreg_regno (x)])
            return 1;
          return 0;
        }
      break;

    case REG:
      regno = REGNO (x);
      if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
        return 1;
      return 0;

    case SCRATCH:
    case PC:
    case CC0:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST:
    case LABEL_REF:
      return 0;

    case CALL:
      /* A non-constant call might use a global register.  */
      return 1;

    default:
      break;
    }

  return 0;
}

/* Returns nonzero if X mentions a global register.  */

int
global_reg_mentioned_p (rtx x)
{
  if (INSN_P (x))
    {
      if (CALL_P (x))
        {
          if (! CONST_OR_PURE_CALL_P (x))
            return 1;
          x = CALL_INSN_FUNCTION_USAGE (x);
          if (x == 0)
            return 0;
        }
      else
        x = PATTERN (x);
    }

  return for_each_rtx (&x, global_reg_mentioned_p_1, NULL);
}

/* Return the number of places FIND appears within X.  If COUNT_DEST is
   zero, we do not count occurrences inside the destination of a SET.  */

int
count_occurrences (rtx x, rtx find, int count_dest)
{
  int i, j;
  enum rtx_code code;
  const char *format_ptr;
  int count;

  if (x == find)
    return 1;

  code = GET_CODE (x);

  switch (code)
    {
    case REG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
      return 0;

    case MEM:
      if (MEM_P (find) && rtx_equal_p (x, find))
        return 1;
      break;

    case SET:
      if (SET_DEST (x) == find && ! count_dest)
        return count_occurrences (SET_SRC (x), find, count_dest);
      break;

    default:
      break;
    }

  format_ptr = GET_RTX_FORMAT (code);
  count = 0;

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          count += count_occurrences (XEXP (x, i), find, count_dest);
          break;

        case 'E':
          for (j = 0; j < XVECLEN (x, i); j++)
            count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
          break;
        }
    }
  return count;
}
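
/* Editorial example, not in the original source: counting register uses in
   a pattern.  For (set (reg A) (plus (reg B) (reg B))),
   count_occurrences (pat, B, 0) returns 2; both occurrences are in the
   SET_SRC, and with COUNT_DEST == 0 a match that is exactly the SET_DEST
   would not be counted.  For pseudo registers, which are shared rtxes, the
   pointer-equality test at the top of the function does the matching.  */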

/* Nonzero if register REG appears somewhere within IN.
   Also works if REG is not a register; in this case it checks
   for a subexpression of IN that is Lisp "equal" to REG.  */

int
reg_mentioned_p (rtx reg, rtx in)
{
  const char *fmt;
  int i;
  enum rtx_code code;

  if (in == 0)
    return 0;

  if (reg == in)
    return 1;

  if (GET_CODE (in) == LABEL_REF)
    return reg == XEXP (in, 0);

  code = GET_CODE (in);

  switch (code)
    {
      /* Compare registers by number.  */
    case REG:
      return REG_P (reg) && REGNO (in) == REGNO (reg);

      /* These codes have no constituent expressions
         and are unique.  */
    case SCRATCH:
    case CC0:
    case PC:
      return 0;

    case CONST_INT:
    case CONST_VECTOR:
    case CONST_DOUBLE:
      /* These are kept unique for a given value.  */
      return 0;

    default:
      break;
    }

  if (GET_CODE (reg) == code && rtx_equal_p (reg, in))
    return 1;

  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (in, i) - 1; j >= 0; j--)
            if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
              return 1;
        }
      else if (fmt[i] == 'e'
               && reg_mentioned_p (reg, XEXP (in, i)))
        return 1;
    }
  return 0;
}
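
/* Editorial note, not in the original source: the "Lisp equal" wording
   above means structural equality.  REGs are compared by register number,
   so two distinct (reg:SI 5) rtxes still count as mentioned, while most
   other codes fall back to rtx_equal_p.  */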

/* Return 1 if in between BEG and END, exclusive of BEG and END, there is
   no CODE_LABEL insn.  */

int
no_labels_between_p (rtx beg, rtx end)
{
  rtx p;
  if (beg == end)
    return 0;
  for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
    if (LABEL_P (p))
      return 0;
  return 1;
}

/* Nonzero if register REG is used in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  */

int
reg_used_between_p (rtx reg, rtx from_insn, rtx to_insn)
{
  rtx insn;

  if (from_insn == to_insn)
    return 0;

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
        && (reg_overlap_mentioned_p (reg, PATTERN (insn))
            || (CALL_P (insn) && find_reg_fusage (insn, USE, reg))))
      return 1;
  return 0;
}

/* Nonzero if the old value of X, a register, is referenced in BODY.  If X
   is entirely replaced by a new value and the only use is as a SET_DEST,
   we do not consider it a reference.  */

int
reg_referenced_p (rtx x, rtx body)
{
  int i;

  switch (GET_CODE (body))
    {
    case SET:
      if (reg_overlap_mentioned_p (x, SET_SRC (body)))
        return 1;

      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
         of a REG that occupies all of the REG, the insn references X if
         it is mentioned in the destination.  */
      if (GET_CODE (SET_DEST (body)) != CC0
          && GET_CODE (SET_DEST (body)) != PC
          && !REG_P (SET_DEST (body))
          && ! (GET_CODE (SET_DEST (body)) == SUBREG
                && REG_P (SUBREG_REG (SET_DEST (body)))
                && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
                      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
                         + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
          && reg_overlap_mentioned_p (x, SET_DEST (body)))
        return 1;
      return 0;

    case ASM_OPERANDS:
      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
        if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))
          return 1;
      return 0;

    case CALL:
    case USE:
    case IF_THEN_ELSE:
      return reg_overlap_mentioned_p (x, body);

    case TRAP_IF:
      return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));

    case PREFETCH:
      return reg_overlap_mentioned_p (x, XEXP (body, 0));

    case UNSPEC:
    case UNSPEC_VOLATILE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))
          return 1;
      return 0;

    case PARALLEL:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (reg_referenced_p (x, XVECEXP (body, 0, i)))
          return 1;
      return 0;

    case CLOBBER:
      if (MEM_P (XEXP (body, 0)))
        if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
          return 1;
      return 0;

    case COND_EXEC:
      if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
        return 1;
      return reg_referenced_p (x, COND_EXEC_CODE (body));

    default:
      return 0;
    }
}

/* Nonzero if register REG is set or clobbered in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  */

int
reg_set_between_p (rtx reg, rtx from_insn, rtx to_insn)
{
  rtx insn;

  if (from_insn == to_insn)
    return 0;

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && reg_set_p (reg, insn))
      return 1;
  return 0;
}

/* Internals of reg_set_between_p.  */
int
reg_set_p (rtx reg, rtx insn)
{
  /* We can be passed an insn or part of one.  If we are passed an insn,
     check if a side-effect of the insn clobbers REG.  */
  if (INSN_P (insn)
      && (FIND_REG_INC_NOTE (insn, reg)
          || (CALL_P (insn)
              && ((REG_P (reg)
                   && REGNO (reg) < FIRST_PSEUDO_REGISTER
                   && TEST_HARD_REG_BIT (regs_invalidated_by_call,
                                         REGNO (reg)))
                  || MEM_P (reg)
                  || find_reg_fusage (insn, CLOBBER, reg)))))
    return 1;

  return set_of (reg, insn) != NULL_RTX;
}
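
/* Editorial example, not in the original source: reg_set_p answers "does
   this insn (possibly via a call clobber or auto-increment) modify REG?".
   For an insn whose pattern is (set (reg 100) (const_int 0)),
   reg_set_p (that reg, insn) is 1 via set_of; for a CALL_INSN it is also 1
   for any hard register in regs_invalidated_by_call.  */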

/* Similar to reg_set_between_p, but check all registers in X.  Return 0
   only if none of them are modified between START and END.  Return 1 if
   X contains a MEM; this routine does use memory aliasing.  */

int
modified_between_p (rtx x, rtx start, rtx end)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, j;
  rtx insn;

  if (start == end)
    return 0;

  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case PC:
    case CC0:
      return 1;

    case MEM:
      if (modified_between_p (XEXP (x, 0), start, end))
        return 1;
      if (MEM_READONLY_P (x))
        return 0;
      for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
        if (memory_modified_in_insn_p (x, insn))
          return 1;
      return 0;
      break;

    case REG:
      return reg_set_between_p (x, start, end);

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
        return 1;

      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (modified_between_p (XVECEXP (x, i, j), start, end))
            return 1;
    }

  return 0;
}

/* Similar to reg_set_p, but check all registers in X.  Return 0 only if none
   of them are modified in INSN.  Return 1 if X contains a MEM; this routine
   does use memory aliasing.  */

int
modified_in_p (rtx x, rtx insn)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, j;

  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case PC:
    case CC0:
      return 1;

    case MEM:
      if (modified_in_p (XEXP (x, 0), insn))
        return 1;
      if (MEM_READONLY_P (x))
        return 0;
      if (memory_modified_in_insn_p (x, insn))
        return 1;
      return 0;
      break;

    case REG:
      return reg_set_p (x, insn);

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
        return 1;

      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (modified_in_p (XVECEXP (x, i, j), insn))
            return 1;
    }

  return 0;
}

/* Helper function for set_of.  */
struct set_of_data
  {
    rtx found;
    rtx pat;
  };

static void
set_of_1 (rtx x, rtx pat, void *data1)
{
  struct set_of_data *data = (struct set_of_data *) (data1);
  if (rtx_equal_p (x, data->pat)
      || (!MEM_P (x) && reg_overlap_mentioned_p (data->pat, x)))
    data->found = pat;
}

/* Given an INSN, return a SET or CLOBBER expression that does modify PAT
   (either directly or via STRICT_LOW_PART and similar modifiers).  */
rtx
set_of (rtx pat, rtx insn)
{
  struct set_of_data data;
  data.found = NULL_RTX;
  data.pat = pat;
  note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
  return data.found;
}
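
/* Editorial example, not in the original source: set_of is built on
   note_stores.  The callback set_of_1 fires once per SET/CLOBBER
   destination in the pattern and records the enclosing SET or CLOBBER rtx
   when that destination is, or overlaps, PAT.  A sketch, assuming a
   single-set insn I that writes register R:

     rtx s = set_of (R, I);    s is the (set (reg R) ...) rtx

   and s == NULL_RTX when I does not modify R.  */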

/* Given an INSN, return a SET expression if this insn has only a single SET.
   It may also have CLOBBERs, USEs, or SET whose output
   will not be used, which we ignore.  */

rtx
single_set_2 (rtx insn, rtx pat)
{
  rtx set = NULL;
  int set_verified = 1;
  int i;

  if (GET_CODE (pat) == PARALLEL)
    {
      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx sub = XVECEXP (pat, 0, i);
          switch (GET_CODE (sub))
            {
            case USE:
            case CLOBBER:
              break;

            case SET:
              /* We can consider insns having multiple sets, where all
                 but one are dead, as single set insns.  In the common case
                 only one set is present in the pattern, so we want to
                 avoid checking for REG_UNUSED notes unless necessary.

                 When we reach a SET for the first time, we just expect it
                 to be the single set we are looking for; only when more
                 sets are found in the insn do we check them.  */
              if (!set_verified)
                {
                  if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
                      && !side_effects_p (set))
                    set = NULL;
                  else
                    set_verified = 1;
                }
              if (!set)
                set = sub, set_verified = 0;
              else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
                       || side_effects_p (sub))
                return NULL_RTX;
              break;

            default:
              return NULL_RTX;
            }
        }
    }
  return set;
}
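
/* Editorial note, not in the original source: single_set_2 is the slow path
   behind the single_set macro in rtl.h, which handles plain SET patterns
   inline and falls back to this function for PARALLELs.  The usual idiom:

     rtx set = single_set (insn);
     if (set)
       ... use SET_SRC (set) and SET_DEST (set) ...

   with NULL_RTX returned when more than one live SET is present.  */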

/* Given an INSN, return nonzero if it has more than one SET, else return
   zero.  */

int
multiple_sets (rtx insn)
{
  int found;
  int i;

  /* INSN must be an insn.  */
  if (! INSN_P (insn))
    return 0;

  /* Only a PARALLEL can have multiple SETs.  */
  if (GET_CODE (PATTERN (insn)) == PARALLEL)
    {
      for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
        if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
          {
            /* If we have already found a SET, then return now.  */
            if (found)
              return 1;
            else
              found = 1;
          }
    }

  /* Either zero or one SET.  */
  return 0;
}

/* Return nonzero if the destination of SET equals the source
   and there are no side effects.  */

int
set_noop_p (rtx set)
{
  rtx src = SET_SRC (set);
  rtx dst = SET_DEST (set);

  if (dst == pc_rtx && src == pc_rtx)
    return 1;

  if (MEM_P (dst) && MEM_P (src))
    return rtx_equal_p (dst, src) && !side_effects_p (dst);

  if (GET_CODE (dst) == ZERO_EXTRACT)
    return rtx_equal_p (XEXP (dst, 0), src)
           && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx
           && !side_effects_p (src);

  if (GET_CODE (dst) == STRICT_LOW_PART)
    dst = XEXP (dst, 0);

  if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
    {
      if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))
        return 0;
      src = SUBREG_REG (src);
      dst = SUBREG_REG (dst);
    }

  return (REG_P (src) && REG_P (dst)
          && REGNO (src) == REGNO (dst));
}

/* Return nonzero if an insn consists only of SETs, each of which only sets a
   value to itself.  */

int
noop_move_p (rtx insn)
{
  rtx pat = PATTERN (insn);

  if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)
    return 1;

  /* Insns carrying these notes are useful later on.  */
  if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
    return 0;

  /* For now treat an insn with a REG_RETVAL note as a
     special insn which should not be considered a no-op.  */
  if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
    return 0;

  if (GET_CODE (pat) == SET && set_noop_p (pat))
    return 1;

  if (GET_CODE (pat) == PARALLEL)
    {
      int i;
      /* If nothing but SETs of registers to themselves,
         this insn can also be deleted.  */
      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx tem = XVECEXP (pat, 0, i);

          if (GET_CODE (tem) == USE
              || GET_CODE (tem) == CLOBBER)
            continue;

          if (GET_CODE (tem) != SET || ! set_noop_p (tem))
            return 0;
        }

      return 1;
    }
  return 0;
}
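
/* Editorial example, not in the original source: a self-move such as
   (set (reg 100) (reg 100)) satisfies set_noop_p, so an insn carrying only
   such sets (plus USEs and CLOBBERs) is reported by noop_move_p and can be
   deleted, unless a REG_EQUAL or REG_RETVAL note makes it worth keeping.  */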


/* Return the last thing that X was assigned from before *PINSN.  If VALID_TO
   is not NULL_RTX then verify that the object is not modified up to VALID_TO.
   If the object was modified, if we hit a partial assignment to X, or hit a
   CODE_LABEL first, return X.  If we found an assignment, update *PINSN to
   point to it.  ALLOW_HWREG is set to 1 if hardware registers are allowed to
   be the src.  */

rtx
find_last_value (rtx x, rtx *pinsn, rtx valid_to, int allow_hwreg)
{
  rtx p;

  for (p = PREV_INSN (*pinsn); p && !LABEL_P (p);
       p = PREV_INSN (p))
    if (INSN_P (p))
      {
        rtx set = single_set (p);
        rtx note = find_reg_note (p, REG_EQUAL, NULL_RTX);

        if (set && rtx_equal_p (x, SET_DEST (set)))
          {
            rtx src = SET_SRC (set);

            if (note && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
              src = XEXP (note, 0);

            if ((valid_to == NULL_RTX
                 || ! modified_between_p (src, PREV_INSN (p), valid_to))
                /* Reject hard registers because we don't usually want
                   to use them; we'd rather use a pseudo.  */
                && (! (REG_P (src)
                      && REGNO (src) < FIRST_PSEUDO_REGISTER) || allow_hwreg))
              {
                *pinsn = p;
                return src;
              }
          }

        /* If set in non-simple way, we don't have a value.  */
        if (reg_set_p (x, p))
          break;
      }

  return x;
}

/* Return nonzero if register in range [REGNO, ENDREGNO)
   appears either explicitly or implicitly in X
   other than being stored into.

   References contained within the substructure at LOC do not count.
   LOC may be zero, meaning don't ignore anything.  */

int
refers_to_regno_p (unsigned int regno, unsigned int endregno, rtx x,
                   rtx *loc)
{
  int i;
  unsigned int x_regno;
  RTX_CODE code;
  const char *fmt;

 repeat:
  /* The contents of a REG_NONNEG note is always zero, so we must come here
     upon repeat in case the last REG_NOTE is a REG_NONNEG note.  */
  if (x == 0)
    return 0;

  code = GET_CODE (x);

  switch (code)
    {
    case REG:
      x_regno = REGNO (x);

      /* If we are modifying the stack, frame, or argument pointer, it will
         clobber a virtual register.  In fact, we could be more precise,
         but it isn't worth it.  */
      if ((x_regno == STACK_POINTER_REGNUM
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
           || x_regno == ARG_POINTER_REGNUM
#endif
           || x_regno == FRAME_POINTER_REGNUM)
          && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)
        return 1;

      return (endregno > x_regno
              && regno < x_regno + (x_regno < FIRST_PSEUDO_REGISTER
                                    ? hard_regno_nregs[x_regno][GET_MODE (x)]
                                    : 1));

    case SUBREG:
      /* If this is a SUBREG of a hard reg, we can see exactly which
         registers are being modified.  Otherwise, handle normally.  */
      if (REG_P (SUBREG_REG (x))
          && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
        {
          unsigned int inner_regno = subreg_regno (x);
          unsigned int inner_endregno
            = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
                             ? hard_regno_nregs[inner_regno][GET_MODE (x)] : 1);

          return endregno > inner_regno && regno < inner_endregno;
        }
      break;

    case CLOBBER:
    case SET:
      if (&SET_DEST (x) != loc
          /* Note setting a SUBREG counts as referring to the REG it is in for
             a pseudo but not for hard registers since we can
             treat each word individually.  */
          && ((GET_CODE (SET_DEST (x)) == SUBREG
               && loc != &SUBREG_REG (SET_DEST (x))
               && REG_P (SUBREG_REG (SET_DEST (x)))
               && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
               && refers_to_regno_p (regno, endregno,
                                     SUBREG_REG (SET_DEST (x)), loc))
              || (!REG_P (SET_DEST (x))
                  && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))
        return 1;

      if (code == CLOBBER || loc == &SET_SRC (x))
        return 0;
      x = SET_SRC (x);
      goto repeat;

    default:
      break;
    }

  /* X does not match, so try its subexpressions.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && loc != &XEXP (x, i))
        {
          if (i == 0)
            {
              x = XEXP (x, 0);
              goto repeat;
            }
          else
            if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
              return 1;
        }
      else if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (loc != &XVECEXP (x, i, j)
                && refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
              return 1;
        }
    }
  return 0;
}

/* Nonzero if modifying X will affect IN.  If X is a register or a SUBREG,
   we check if any register number in X conflicts with the relevant register
   numbers.  If X is a constant, return 0.  If X is a MEM, return 1 iff IN
   contains a MEM (we don't bother checking for memory addresses that can't
   conflict because we expect this to be a rare case).  */

int
reg_overlap_mentioned_p (rtx x, rtx in)
{
  unsigned int regno, endregno;

  /* If either argument is a constant, then modifying X can not
     affect IN.  Here we look at IN; we can profitably combine
     CONSTANT_P (x) with the switch statement below.  */
  if (CONSTANT_P (in))
    return 0;

 recurse:
  switch (GET_CODE (x))
    {
    case STRICT_LOW_PART:
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* Overly conservative.  */
      x = XEXP (x, 0);
      goto recurse;

    case SUBREG:
      regno = REGNO (SUBREG_REG (x));
      if (regno < FIRST_PSEUDO_REGISTER)
        regno = subreg_regno (x);
      goto do_reg;

    case REG:
      regno = REGNO (x);
    do_reg:
      endregno = regno + (regno < FIRST_PSEUDO_REGISTER
                          ? hard_regno_nregs[regno][GET_MODE (x)] : 1);
      return refers_to_regno_p (regno, endregno, in, (rtx*) 0);

    case MEM:
      {
        const char *fmt;
        int i;

        if (MEM_P (in))
          return 1;

        fmt = GET_RTX_FORMAT (GET_CODE (in));
        for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
          if (fmt[i] == 'e')
            {
              if (reg_overlap_mentioned_p (x, XEXP (in, i)))
                return 1;
            }
          else if (fmt[i] == 'E')
            {
              int j;
              for (j = XVECLEN (in, i) - 1; j >= 0; --j)
                if (reg_overlap_mentioned_p (x, XVECEXP (in, i, j)))
                  return 1;
            }

        return 0;
      }

    case SCRATCH:
    case PC:
    case CC0:
      return reg_mentioned_p (x, in);

    case PARALLEL:
      {
        int i;

        /* If any register in here refers to it we return true.  */
        for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
          if (XEXP (XVECEXP (x, 0, i), 0) != 0
              && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))
            return 1;
        return 0;
      }

    default:
      gcc_assert (CONSTANT_P (x));
      return 0;
    }
}
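
/* Editorial example, not in the original source: unlike reg_mentioned_p,
   this test is hard-register aware.  On a target with 32-bit words where a
   DImode value occupies hard regs 2-3 (hard_regno_nregs[2][DImode] == 2),

     reg_overlap_mentioned_p (gen_rtx_REG (DImode, 2),
                              gen_rtx_REG (SImode, 3))

   is nonzero even though the REGNOs differ, because ENDREGNO is computed
   from hard_regno_nregs.  */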

/* Call FUN on each register or MEM that is stored into or clobbered by X.
   (X would be the pattern of an insn).
   FUN receives two arguments:
     the REG, MEM, CC0 or PC being stored in or clobbered,
     the SET or CLOBBER rtx that does the store.

  If the item being stored in or clobbered is a SUBREG of a hard register,
  the SUBREG will be passed.  */

void
note_stores (rtx x, void (*fun) (rtx, rtx, void *), void *data)
{
  int i;

  if (GET_CODE (x) == COND_EXEC)
    x = COND_EXEC_CODE (x);

  if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
    {
      rtx dest = SET_DEST (x);

      while ((GET_CODE (dest) == SUBREG
              && (!REG_P (SUBREG_REG (dest))
                  || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
             || GET_CODE (dest) == ZERO_EXTRACT
             || GET_CODE (dest) == STRICT_LOW_PART)
        dest = XEXP (dest, 0);

      /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
         each of whose first operand is a register.  */
      if (GET_CODE (dest) == PARALLEL)
        {
          for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
            if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
              (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data);
        }
      else
        (*fun) (dest, x, data);
    }

  else if (GET_CODE (x) == PARALLEL)
    for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
      note_stores (XVECEXP (x, 0, i), fun, data);
}
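
/* Editorial example, not in the original source: a minimal note_stores
   callback that counts stores to hard registers; the helper name is
   hypothetical.

     static void
     count_hard_reg_stores (rtx dest, rtx setter ATTRIBUTE_UNUSED,
                            void *data)
     {
       if (REG_P (dest) && REGNO (dest) < FIRST_PSEUDO_REGISTER)
         ++*(int *) data;
     }

     int n = 0;
     note_stores (PATTERN (insn), count_hard_reg_stores, &n);
*/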

/* Like note_stores, but call FUN for each expression that is being
   referenced in PBODY, a pointer to the PATTERN of an insn.  We only call
   FUN for each expression, not any interior subexpressions.  FUN receives a
   pointer to the expression and the DATA passed to this function.

   Note that this is not quite the same test as that done in reg_referenced_p
   since that considers something as being referenced if it is being
   partially set, while we do not.  */

void
note_uses (rtx *pbody, void (*fun) (rtx *, void *), void *data)
{
  rtx body = *pbody;
  int i;

  switch (GET_CODE (body))
    {
    case COND_EXEC:
      (*fun) (&COND_EXEC_TEST (body), data);
      note_uses (&COND_EXEC_CODE (body), fun, data);
      return;

    case PARALLEL:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        note_uses (&XVECEXP (body, 0, i), fun, data);
      return;

    case USE:
      (*fun) (&XEXP (body, 0), data);
      return;

    case ASM_OPERANDS:
      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
        (*fun) (&ASM_OPERANDS_INPUT (body, i), data);
      return;

    case TRAP_IF:
      (*fun) (&TRAP_CONDITION (body), data);
      return;

    case PREFETCH:
      (*fun) (&XEXP (body, 0), data);
      return;

    case UNSPEC:
    case UNSPEC_VOLATILE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        (*fun) (&XVECEXP (body, 0, i), data);
      return;

    case CLOBBER:
      if (MEM_P (XEXP (body, 0)))
        (*fun) (&XEXP (XEXP (body, 0), 0), data);
      return;

    case SET:
      {
        rtx dest = SET_DEST (body);

        /* For sets we process everything in the source, plus registers in
           the memory expression of the store, and the operands of a
           ZERO_EXTRACT.  */
        (*fun) (&SET_SRC (body), data);

        if (GET_CODE (dest) == ZERO_EXTRACT)
          {
            (*fun) (&XEXP (dest, 1), data);
            (*fun) (&XEXP (dest, 2), data);
          }

        while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
          dest = XEXP (dest, 0);

        if (MEM_P (dest))
          (*fun) (&XEXP (dest, 0), data);
      }
      return;

    default:
      /* All the other possibilities never store.  */
      (*fun) (pbody, data);
      return;
    }
}
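
/* Editorial note, not in the original source: unlike note_stores, the
   callback here receives a pointer (rtx *) into the pattern, so a pass can
   rewrite the use in place, e.g. replacing a pseudo with an equivalent
   constant before re-recognizing the insn.  */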

/* Return nonzero if X's old contents don't survive after INSN.
   This will be true if X is (cc0) or if X is a register and
   X dies in INSN or because INSN entirely sets X.

   "Entirely set" means set directly and not through a SUBREG, or
   ZERO_EXTRACT, so no trace of the old contents remains.
   Likewise, REG_INC does not count.

   REG may be a hard or pseudo reg.  Renumbering is not taken into account,
   but for this use that makes no difference, since regs don't overlap
   during their lifetimes.  Therefore, this function may be used
   at any time after deaths have been computed (in flow.c).

   If REG is a hard reg that occupies multiple machine registers, this
   function will only return 1 if each of those registers will be replaced
   by INSN.  */

int
dead_or_set_p (rtx insn, rtx x)
{
  unsigned int regno, last_regno;
  unsigned int i;

  /* Can't use cc0_rtx below since this file is used by genattrtab.c.  */
  if (GET_CODE (x) == CC0)
    return 1;

  gcc_assert (REG_P (x));

  regno = REGNO (x);
  last_regno = (regno >= FIRST_PSEUDO_REGISTER ? regno
                : regno + hard_regno_nregs[regno][GET_MODE (x)] - 1);

  for (i = regno; i <= last_regno; i++)
    if (! dead_or_set_regno_p (insn, i))
      return 0;

  return 1;
}

/* Return TRUE iff DEST is a register or subreg of a register and
   doesn't change the number of words of the inner register, and any
   part of the register is TEST_REGNO.  */

static bool
covers_regno_no_parallel_p (rtx dest, unsigned int test_regno)
{
  unsigned int regno, endregno;

  if (GET_CODE (dest) == SUBREG
      && (((GET_MODE_SIZE (GET_MODE (dest))
            + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
          == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
               + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
    dest = SUBREG_REG (dest);

  if (!REG_P (dest))
    return false;

  regno = REGNO (dest);
  endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
              : regno + hard_regno_nregs[regno][GET_MODE (dest)]);
  return (test_regno >= regno && test_regno < endregno);
}

/* Like covers_regno_no_parallel_p, but also handles PARALLELs where
   any member matches the covers_regno_no_parallel_p criteria.  */

static bool
covers_regno_p (rtx dest, unsigned int test_regno)
{
  if (GET_CODE (dest) == PARALLEL)
    {
      /* Some targets place small structures in registers for return
         values of functions, and those registers are wrapped in
         PARALLELs that we may see as the destination of a SET.  */
      int i;

      for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
        {
          rtx inner = XEXP (XVECEXP (dest, 0, i), 0);
          if (inner != NULL_RTX
              && covers_regno_no_parallel_p (inner, test_regno))
            return true;
        }

      return false;
    }
  else
    return covers_regno_no_parallel_p (dest, test_regno);
}

/* Utility function for dead_or_set_p to check an individual register.  Also
   called from flow.c.  */

int
dead_or_set_regno_p (rtx insn, unsigned int test_regno)
{
  rtx pattern;

  /* See if there is a death note for something that includes TEST_REGNO.  */
  if (find_regno_note (insn, REG_DEAD, test_regno))
    return 1;

  if (CALL_P (insn)
      && find_regno_fusage (insn, CLOBBER, test_regno))
    return 1;

  pattern = PATTERN (insn);

  if (GET_CODE (pattern) == COND_EXEC)
    pattern = COND_EXEC_CODE (pattern);

  if (GET_CODE (pattern) == SET)
    return covers_regno_p (SET_DEST (pattern), test_regno);
  else if (GET_CODE (pattern) == PARALLEL)
    {
      int i;

      for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
        {
          rtx body = XVECEXP (pattern, 0, i);

          if (GET_CODE (body) == COND_EXEC)
            body = COND_EXEC_CODE (body);

          if ((GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
              && covers_regno_p (SET_DEST (body), test_regno))
            return 1;
        }
    }

  return 0;
}

/* Return the reg-note of kind KIND in insn INSN, if there is one.
   If DATUM is nonzero, look for one whose datum is DATUM.  */

rtx
find_reg_note (rtx insn, enum reg_note kind, rtx datum)
{
  rtx link;

  /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
  if (! INSN_P (insn))
    return 0;
  if (datum == 0)
    {
      for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
        if (REG_NOTE_KIND (link) == kind)
          return link;
      return 0;
    }

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == kind && datum == XEXP (link, 0))
      return link;
  return 0;
}
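
/* Editorial example, not in the original source: the usual idiom is

     rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
     if (note)
       ... XEXP (note, 0) is the equivalent value ...

   passing NULL_RTX as DATUM to accept any note of the given kind.  */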
1675
 
1676
/* Return the reg-note of kind KIND in insn INSN which applies to register
1677
   number REGNO, if any.  Return 0 if there is no such reg-note.  Note that
1678
   the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
1679
   it might be the case that the note overlaps REGNO.  */
1680
 
1681
rtx
1682
find_regno_note (rtx insn, enum reg_note kind, unsigned int regno)
1683
{
1684
  rtx link;
1685
 
1686
  /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
1687
  if (! INSN_P (insn))
1688
    return 0;
1689
 
1690
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1691
    if (REG_NOTE_KIND (link) == kind
1692
        /* Verify that it is a register, so that scratch and MEM won't cause a
1693
           problem here.  */
1694
        && REG_P (XEXP (link, 0))
1695
        && REGNO (XEXP (link, 0)) <= regno
1696
        && ((REGNO (XEXP (link, 0))
1697
             + (REGNO (XEXP (link, 0)) >= FIRST_PSEUDO_REGISTER ? 1
1698
                : hard_regno_nregs[REGNO (XEXP (link, 0))]
1699
                                  [GET_MODE (XEXP (link, 0))]))
1700
            > regno))
1701
      return link;
1702
  return 0;
1703
}
1704
 
1705
/* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
1706
   has such a note.  */
1707
 
1708
rtx
1709
find_reg_equal_equiv_note (rtx insn)
1710
{
1711
  rtx link;
1712
 
1713
  if (!INSN_P (insn))
1714
    return 0;
1715
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1716
    if (REG_NOTE_KIND (link) == REG_EQUAL
1717
        || REG_NOTE_KIND (link) == REG_EQUIV)
1718
      {
1719
        if (single_set (insn) == 0)
1720
          return 0;
1721
        return link;
1722
      }
1723
  return NULL;
1724
}
1725
 
1726
/* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
   in the CALL_INSN_FUNCTION_USAGE information of INSN.  */

int
find_reg_fusage (rtx insn, enum rtx_code code, rtx datum)
{
  /* If it's not a CALL_INSN, it can't possibly have a
     CALL_INSN_FUNCTION_USAGE field, so don't bother checking.  */
  if (!CALL_P (insn))
    return 0;

  gcc_assert (datum);

  if (!REG_P (datum))
    {
      rtx link;

      for (link = CALL_INSN_FUNCTION_USAGE (insn);
           link;
           link = XEXP (link, 1))
        if (GET_CODE (XEXP (link, 0)) == code
            && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))
          return 1;
    }
  else
    {
      unsigned int regno = REGNO (datum);

      /* CALL_INSN_FUNCTION_USAGE information cannot contain references
         to pseudo registers, so don't bother checking.  */

      if (regno < FIRST_PSEUDO_REGISTER)
        {
          unsigned int end_regno
            = regno + hard_regno_nregs[regno][GET_MODE (datum)];
          unsigned int i;

          for (i = regno; i < end_regno; i++)
            if (find_regno_fusage (insn, code, i))
              return 1;
        }
    }

  return 0;
}

/* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
   in the CALL_INSN_FUNCTION_USAGE information of INSN.  */

int
find_regno_fusage (rtx insn, enum rtx_code code, unsigned int regno)
{
  rtx link;

  /* CALL_INSN_FUNCTION_USAGE information cannot contain references
     to pseudo registers, so don't bother checking.  */

  if (regno >= FIRST_PSEUDO_REGISTER
      || !CALL_P (insn))
    return 0;

  for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
    {
      unsigned int regnote;
      rtx op, reg;

      if (GET_CODE (op = XEXP (link, 0)) == code
          && REG_P (reg = XEXP (op, 0))
          && (regnote = REGNO (reg)) <= regno
          && regnote + hard_regno_nregs[regnote][GET_MODE (reg)] > regno)
        return 1;
    }

  return 0;
}

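/* Usage sketch (CALL_INSN and REG are hypothetical): to ask whether a
   call uses REG, for example to pass an argument, check the function
   usage list:

     if (find_reg_fusage (call_insn, USE, reg))
       ... REG carries a value into the call ...

   Passing CLOBBER for CODE asks instead whether the call is recorded
   as clobbering REG.  */
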
/* Return true if INSN is a call to a pure function.  */

int
pure_call_p (rtx insn)
{
  rtx link;

  if (!CALL_P (insn) || ! CONST_OR_PURE_CALL_P (insn))
    return 0;

  /* Look for the note that differentiates const and pure functions.  */
  for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
    {
      rtx u, m;

      if (GET_CODE (u = XEXP (link, 0)) == USE
          && MEM_P (m = XEXP (u, 0)) && GET_MODE (m) == BLKmode
          && GET_CODE (XEXP (m, 0)) == SCRATCH)
        return 1;
    }

  return 0;
}

/* Remove register note NOTE from the REG_NOTES of INSN.  */

void
remove_note (rtx insn, rtx note)
{
  rtx link;

  if (note == NULL_RTX)
    return;

  if (REG_NOTES (insn) == note)
    {
      REG_NOTES (insn) = XEXP (note, 1);
      return;
    }

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (XEXP (link, 1) == note)
      {
        XEXP (link, 1) = XEXP (note, 1);
        return;
      }

  gcc_unreachable ();
}

/* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
   return 1 if it is found.  A simple equality test is used to determine if
   NODE matches.  */

int
in_expr_list_p (rtx listp, rtx node)
{
  rtx x;

  for (x = listp; x; x = XEXP (x, 1))
    if (node == XEXP (x, 0))
      return 1;

  return 0;
}

/* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
   remove that entry from the list if it is found.

   A simple equality test is used to determine if NODE matches.  */

void
remove_node_from_expr_list (rtx node, rtx *listp)
{
  rtx temp = *listp;
  rtx prev = NULL_RTX;

  while (temp)
    {
      if (node == XEXP (temp, 0))
        {
          /* Splice the node out of the list.  */
          if (prev)
            XEXP (prev, 1) = XEXP (temp, 1);
          else
            *listp = XEXP (temp, 1);

          return;
        }

      prev = temp;
      temp = XEXP (temp, 1);
    }
}

/* Nonzero if X contains any volatile instructions.  These are instructions
   which may cause unpredictable changes to machine state, and thus no
   instructions should be moved or combined across them.  This includes
   only volatile asms and UNSPEC_VOLATILE instructions.  */

int
volatile_insn_p (rtx x)
{
  RTX_CODE code;

  code = GET_CODE (x);
  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CC0:
    case PC:
    case REG:
    case SCRATCH:
    case CLOBBER:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case CALL:
    case MEM:
      return 0;

    case UNSPEC_VOLATILE:
 /* case TRAP_IF: This isn't clear yet.  */
      return 1;

    case ASM_INPUT:
    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    const char *fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          {
            if (volatile_insn_p (XEXP (x, i)))
              return 1;
          }
        else if (fmt[i] == 'E')
          {
            int j;
            for (j = 0; j < XVECLEN (x, i); j++)
              if (volatile_insn_p (XVECEXP (x, i, j)))
                return 1;
          }
      }
  }
  return 0;
}

/* Nonzero if X contains any volatile memory references,
   UNSPEC_VOLATILE operations, or volatile ASM_OPERANDS expressions.  */

int
volatile_refs_p (rtx x)
{
  RTX_CODE code;

  code = GET_CODE (x);
  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CC0:
    case PC:
    case REG:
    case SCRATCH:
    case CLOBBER:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 0;

    case UNSPEC_VOLATILE:
      return 1;

    case MEM:
    case ASM_INPUT:
    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    const char *fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          {
            if (volatile_refs_p (XEXP (x, i)))
              return 1;
          }
        else if (fmt[i] == 'E')
          {
            int j;
            for (j = 0; j < XVECLEN (x, i); j++)
              if (volatile_refs_p (XVECEXP (x, i, j)))
                return 1;
          }
      }
  }
  return 0;
}

/* Similar to above, except that it also rejects register pre- and post-
   incrementing.  */

int
side_effects_p (rtx x)
{
  RTX_CODE code;

  code = GET_CODE (x);
  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CC0:
    case PC:
    case REG:
    case SCRATCH:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 0;

    case CLOBBER:
      /* Reject CLOBBER with a non-VOID mode.  These are made by combine.c
         when some combination can't be done.  If we see one, don't think
         that we can simplify the expression.  */
      return (GET_MODE (x) != VOIDmode);

    case PRE_INC:
    case PRE_DEC:
    case POST_INC:
    case POST_DEC:
    case PRE_MODIFY:
    case POST_MODIFY:
    case CALL:
    case UNSPEC_VOLATILE:
 /* case TRAP_IF: This isn't clear yet.  */
      return 1;

    case MEM:
    case ASM_INPUT:
    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    const char *fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          {
            if (side_effects_p (XEXP (x, i)))
              return 1;
          }
        else if (fmt[i] == 'E')
          {
            int j;
            for (j = 0; j < XVECLEN (x, i); j++)
              if (side_effects_p (XVECEXP (x, i, j)))
                return 1;
          }
      }
  }
  return 0;
}

/* Return nonzero if evaluating rtx X might cause a trap.  UNALIGNED_MEMS
   controls whether nonzero is returned for unaligned memory accesses on
   strict alignment machines.  */

static int
may_trap_p_1 (rtx x, bool unaligned_mems)
{
  int i;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return 0;
  code = GET_CODE (x);
  switch (code)
    {
      /* Handle these cases quickly.  */
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case CONST:
    case PC:
    case CC0:
    case REG:
    case SCRATCH:
      return 0;

    case ASM_INPUT:
    case UNSPEC_VOLATILE:
    case TRAP_IF:
      return 1;

    case ASM_OPERANDS:
      return MEM_VOLATILE_P (x);

      /* Memory ref can trap unless it's a static var or a stack slot.  */
    case MEM:
      if (MEM_NOTRAP_P (x)
          && (!STRICT_ALIGNMENT || !unaligned_mems))
        return 0;
      return
        rtx_addr_can_trap_p_1 (XEXP (x, 0), GET_MODE (x), unaligned_mems);

      /* Division by a non-constant might trap.  */
    case DIV:
    case MOD:
    case UDIV:
    case UMOD:
      if (HONOR_SNANS (GET_MODE (x)))
        return 1;
      if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
        return flag_trapping_math;
      if (!CONSTANT_P (XEXP (x, 1)) || (XEXP (x, 1) == const0_rtx))
        return 1;
      break;

    case EXPR_LIST:
      /* An EXPR_LIST is used to represent a function call.  This
         certainly may trap.  */
      return 1;

    case GE:
    case GT:
    case LE:
    case LT:
    case LTGT:
    case COMPARE:
      /* Some floating point comparisons may trap.  */
      if (!flag_trapping_math)
        break;
      /* ??? There is no machine independent way to check for tests that trap
         when COMPARE is used, though many targets do make this distinction.
         For instance, sparc uses CCFPE for compares which generate exceptions
         and CCFP for compares which do not generate exceptions.  */
      if (HONOR_NANS (GET_MODE (x)))
        return 1;
      /* But often the compare has some CC mode, so check operand
         modes as well.  */
      if (HONOR_NANS (GET_MODE (XEXP (x, 0)))
          || HONOR_NANS (GET_MODE (XEXP (x, 1))))
        return 1;
      break;

    case EQ:
    case NE:
      if (HONOR_SNANS (GET_MODE (x)))
        return 1;
      /* Often comparison is CC mode, so check operand modes.  */
      if (HONOR_SNANS (GET_MODE (XEXP (x, 0)))
          || HONOR_SNANS (GET_MODE (XEXP (x, 1))))
        return 1;
      break;

    case FIX:
      /* Conversion of floating point might trap.  */
      if (flag_trapping_math && HONOR_NANS (GET_MODE (XEXP (x, 0))))
        return 1;
      break;

    case NEG:
    case ABS:
    case SUBREG:
      /* These operations don't trap even with floating point.  */
      break;

    default:
      /* Any floating arithmetic may trap.  */
      if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
          && flag_trapping_math)
        return 1;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          if (may_trap_p_1 (XEXP (x, i), unaligned_mems))
            return 1;
        }
      else if (fmt[i] == 'E')
        {
          int j;
          for (j = 0; j < XVECLEN (x, i); j++)
            if (may_trap_p_1 (XVECEXP (x, i, j), unaligned_mems))
              return 1;
        }
    }
  return 0;
}

/* Return nonzero if evaluating rtx X might cause a trap.  */

int
may_trap_p (rtx x)
{
  return may_trap_p_1 (x, false);
}

/* Same as above, but additionally return nonzero if evaluating rtx X might
   cause a fault.  We define a fault for the purpose of this function as an
   erroneous execution condition that cannot be encountered during the normal
   execution of a valid program; the typical example is an unaligned memory
   access on a strict alignment machine.  The compiler guarantees that it
   doesn't generate code that will fault from a valid program, but this
   guarantee doesn't mean anything for individual instructions.  Consider
   the following example:

      struct S { int d; union { char *cp; int *ip; }; };

      int foo(struct S *s)
      {
        if (s->d == 1)
          return *s->ip;
        else
          return *s->cp;
      }

   on a strict alignment machine.  In a valid program, foo will never be
   invoked on a structure for which d is equal to 1 and the underlying
   unique field of the union not aligned on a 4-byte boundary, but the
   expression *s->ip might cause a fault if considered individually.

   At the RTL level, potentially problematic expressions will almost always
   satisfy may_trap_p; for example, the above dereference can be emitted as
   (mem:SI (reg:P)) and this expression is may_trap_p for a generic register.
   However, suppose that foo is inlined in a caller that causes s->cp to
   point to a local character variable and guarantees that s->d is not set
   to 1; foo may have been effectively translated into pseudo-RTL as:

      if ((reg:SI) == 1)
        (set (reg:SI) (mem:SI (%fp - 7)))
      else
        (set (reg:QI) (mem:QI (%fp - 7)))

   Now (mem:SI (%fp - 7)) is considered as not may_trap_p since it is a
   memory reference to a stack slot, but it will certainly cause a fault
   on a strict alignment machine.  */

int
may_trap_or_fault_p (rtx x)
{
  return may_trap_p_1 (x, true);
}

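/* Usage sketch (X is a hypothetical candidate expression): a pass that
   wants to evaluate X speculatively typically needs both checks,

     if (!side_effects_p (x) && !may_trap_p (x))
       ... X may be computed early or moved across a branch ...

   since a trapping expression can be free of side effects and vice
   versa.  */
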
/* Return nonzero if X contains a comparison that is not either EQ or NE,
   i.e., an inequality.  */

int
inequality_comparisons_p (rtx x)
{
  const char *fmt;
  int len, i;
  enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case REG:
    case SCRATCH:
    case PC:
    case CC0:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
      return 0;

    case LT:
    case LTU:
    case GT:
    case GTU:
    case LE:
    case LEU:
    case GE:
    case GEU:
      return 1;

    default:
      break;
    }

  len = GET_RTX_LENGTH (code);
  fmt = GET_RTX_FORMAT (code);

  for (i = 0; i < len; i++)
    {
      if (fmt[i] == 'e')
        {
          if (inequality_comparisons_p (XEXP (x, i)))
            return 1;
        }
      else if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (inequality_comparisons_p (XVECEXP (x, i, j)))
              return 1;
        }
    }

  return 0;
}

/* Replace any occurrence of FROM in X with TO.  The function does
   not enter into CONST_DOUBLE for the replace.

   Note that copying is not done so X must not be shared unless all copies
   are to be modified.  */

rtx
replace_rtx (rtx x, rtx from, rtx to)
{
  int i, j;
  const char *fmt;

  /* The following prevents a loop when we replace a MEM appearing
     inside a CONST_DOUBLE with the same CONST_DOUBLE.  */
  if (x != 0 && GET_CODE (x) == CONST_DOUBLE)
    return x;

  if (x == from)
    return to;

  /* Allow this function to make replacements in EXPR_LISTs.  */
  if (x == 0)
    return 0;

  if (GET_CODE (x) == SUBREG)
    {
      rtx new = replace_rtx (SUBREG_REG (x), from, to);

      if (GET_CODE (new) == CONST_INT)
        {
          x = simplify_subreg (GET_MODE (x), new,
                               GET_MODE (SUBREG_REG (x)),
                               SUBREG_BYTE (x));
          gcc_assert (x);
        }
      else
        SUBREG_REG (x) = new;

      return x;
    }
  else if (GET_CODE (x) == ZERO_EXTEND)
    {
      rtx new = replace_rtx (XEXP (x, 0), from, to);

      if (GET_CODE (new) == CONST_INT)
        {
          x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
                                        new, GET_MODE (XEXP (x, 0)));
          gcc_assert (x);
        }
      else
        XEXP (x, 0) = new;

      return x;
    }

  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        XEXP (x, i) = replace_rtx (XEXP (x, i), from, to);
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
    }

  return x;
}

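/* Usage sketch (PAT, FROM and TO are hypothetical): because no copying
   is done, a possibly shared pattern must be copied before substituting,

     pat = replace_rtx (copy_rtx (pat), from, to);

   otherwise every user of the shared rtx would observe the change.  */
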
/* Throughout the rtx X, replace many registers according to REG_MAP.
   Return the replacement for X (which may be X with altered contents).
   REG_MAP[R] is the replacement for register R, or 0 for don't replace.
   NREGS is the length of REG_MAP; regs >= NREGS are not mapped.

   We only support REG_MAP entries of REG or SUBREG.  Also, hard registers
   should not be mapped to pseudos or vice versa since validate_change
   is not called.

   If REPLACE_DEST is 1, replacements are also done in destinations;
   otherwise, only sources are replaced.  */

rtx
replace_regs (rtx x, rtx *reg_map, unsigned int nregs, int replace_dest)
{
  enum rtx_code code;
  int i;
  const char *fmt;

  if (x == 0)
    return x;

  code = GET_CODE (x);
  switch (code)
    {
    case SCRATCH:
    case PC:
    case CC0:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      return x;

    case REG:
      /* Verify that the register has an entry before trying to access it.  */
      if (REGNO (x) < nregs && reg_map[REGNO (x)] != 0)
        {
          /* SUBREGs can't be shared.  Always return a copy to ensure that if
             this replacement occurs more than once then each instance will
             get distinct rtx.  */
          if (GET_CODE (reg_map[REGNO (x)]) == SUBREG)
            return copy_rtx (reg_map[REGNO (x)]);
          return reg_map[REGNO (x)];
        }
      return x;

    case SUBREG:
      /* Prevent making nested SUBREGs.  */
      if (REG_P (SUBREG_REG (x)) && REGNO (SUBREG_REG (x)) < nregs
          && reg_map[REGNO (SUBREG_REG (x))] != 0
          && GET_CODE (reg_map[REGNO (SUBREG_REG (x))]) == SUBREG)
        {
          rtx map_val = reg_map[REGNO (SUBREG_REG (x))];
          return simplify_gen_subreg (GET_MODE (x), map_val,
                                      GET_MODE (SUBREG_REG (x)),
                                      SUBREG_BYTE (x));
        }
      break;

    case SET:
      if (replace_dest)
        SET_DEST (x) = replace_regs (SET_DEST (x), reg_map, nregs, 0);

      else if (MEM_P (SET_DEST (x))
               || GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
        /* Even if we are not to replace destinations, replace register if it
           is CONTAINED in destination (destination is memory or
           STRICT_LOW_PART).  */
        XEXP (SET_DEST (x), 0) = replace_regs (XEXP (SET_DEST (x), 0),
                                               reg_map, nregs, 0);
      else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
        /* Similarly, for ZERO_EXTRACT we replace all operands.  */
        break;

      SET_SRC (x) = replace_regs (SET_SRC (x), reg_map, nregs, 0);
      return x;

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        XEXP (x, i) = replace_regs (XEXP (x, i), reg_map, nregs, replace_dest);
      else if (fmt[i] == 'E')
        {
          int j;
          for (j = 0; j < XVECLEN (x, i); j++)
            XVECEXP (x, i, j) = replace_regs (XVECEXP (x, i, j), reg_map,
                                              nregs, replace_dest);
        }
    }
  return x;
}

/* Replace occurrences of the old label in *X with the new one.
   DATA is a REPLACE_LABEL_DATA containing the old and new labels.  */

int
replace_label (rtx *x, void *data)
{
  rtx l = *x;
  rtx old_label = ((replace_label_data *) data)->r1;
  rtx new_label = ((replace_label_data *) data)->r2;
  bool update_label_nuses = ((replace_label_data *) data)->update_label_nuses;

  if (l == NULL_RTX)
    return 0;

  if (GET_CODE (l) == SYMBOL_REF
      && CONSTANT_POOL_ADDRESS_P (l))
    {
      rtx c = get_pool_constant (l);
      if (rtx_referenced_p (old_label, c))
        {
          rtx new_c, new_l;
          replace_label_data *d = (replace_label_data *) data;

          /* Create a copy of constant C; replace the label inside
             but do not update LABEL_NUSES because uses in constant pool
             are not counted.  */
          new_c = copy_rtx (c);
          d->update_label_nuses = false;
          for_each_rtx (&new_c, replace_label, data);
          d->update_label_nuses = update_label_nuses;

          /* Add the new constant NEW_C to the constant pool and replace
             the old reference to the constant with the new one.  */
          new_l = XEXP (force_const_mem (get_pool_mode (l), new_c), 0);
          *x = replace_rtx (l, l, new_l);
        }
      return 0;
    }

  /* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL
     field.  This is not handled by for_each_rtx because it doesn't
     handle unprinted ('0') fields.  */
  if (JUMP_P (l) && JUMP_LABEL (l) == old_label)
    JUMP_LABEL (l) = new_label;

  if ((GET_CODE (l) == LABEL_REF
       || GET_CODE (l) == INSN_LIST)
      && XEXP (l, 0) == old_label)
    {
      XEXP (l, 0) = new_label;
      if (update_label_nuses)
        {
          ++LABEL_NUSES (new_label);
          --LABEL_NUSES (old_label);
        }
      return 0;
    }

  return 0;
}

/* When *BODY is equal to X or X is directly referenced by *BODY
   return nonzero, thus FOR_EACH_RTX stops traversing and returns nonzero
   too, otherwise FOR_EACH_RTX continues traversing *BODY.  */

static int
rtx_referenced_p_1 (rtx *body, void *x)
{
  rtx y = (rtx) x;

  if (*body == NULL_RTX)
    return y == NULL_RTX;

  /* Return true if a label_ref *BODY refers to label Y.  */
  if (GET_CODE (*body) == LABEL_REF && LABEL_P (y))
    return XEXP (*body, 0) == y;

  /* If *BODY is a reference to a pool constant, traverse the constant.  */
  if (GET_CODE (*body) == SYMBOL_REF
      && CONSTANT_POOL_ADDRESS_P (*body))
    return rtx_referenced_p (y, get_pool_constant (*body));

  /* By default, compare the RTL expressions.  */
  return rtx_equal_p (*body, y);
}

/* Return true if X is referenced in BODY.  */

int
rtx_referenced_p (rtx x, rtx body)
{
  return for_each_rtx (&body, rtx_referenced_p_1, x);
}

/* If INSN is a tablejump return true and store the label (before jump table) to
   *LABELP and the jump table to *TABLEP.  LABELP and TABLEP may be NULL.  */

bool
tablejump_p (rtx insn, rtx *labelp, rtx *tablep)
{
  rtx label, table;

  if (JUMP_P (insn)
      && (label = JUMP_LABEL (insn)) != NULL_RTX
      && (table = next_active_insn (label)) != NULL_RTX
      && JUMP_P (table)
      && (GET_CODE (PATTERN (table)) == ADDR_VEC
          || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
    {
      if (labelp)
        *labelp = label;
      if (tablep)
        *tablep = table;
      return true;
    }
  return false;
}

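/* Usage sketch (INSN is a hypothetical jump): a caller that wants the
   dispatch table of a tablejump can retrieve it with

     rtx table;
     if (tablejump_p (insn, NULL, &table))
       {
         rtx body = PATTERN (table);   /* ADDR_VEC or ADDR_DIFF_VEC.  */
         ...
       }

   NULL is passed for LABELP when the label itself is not needed.  */
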
/* A subroutine of computed_jump_p.  Return 1 if X contains a REG or MEM or
   constant that is not in the constant pool and not in the condition
   of an IF_THEN_ELSE.  */

static int
computed_jump_p_1 (rtx x)
{
  enum rtx_code code = GET_CODE (x);
  int i, j;
  const char *fmt;

  switch (code)
    {
    case LABEL_REF:
    case PC:
      return 0;

    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case REG:
      return 1;

    case MEM:
      return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
                && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));

    case IF_THEN_ELSE:
      return (computed_jump_p_1 (XEXP (x, 1))
              || computed_jump_p_1 (XEXP (x, 2)));

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e'
          && computed_jump_p_1 (XEXP (x, i)))
        return 1;

      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (computed_jump_p_1 (XVECEXP (x, i, j)))
            return 1;
    }

  return 0;
}

/* Return nonzero if INSN is an indirect jump (aka computed jump).

   Tablejumps and casesi insns are not considered indirect jumps;
   we can recognize them by a (use (label_ref)).  */

int
computed_jump_p (rtx insn)
{
  int i;
  if (JUMP_P (insn))
    {
      rtx pat = PATTERN (insn);

      if (find_reg_note (insn, REG_LABEL, NULL_RTX))
        return 0;
      else if (GET_CODE (pat) == PARALLEL)
        {
          int len = XVECLEN (pat, 0);
          int has_use_labelref = 0;

          for (i = len - 1; i >= 0; i--)
            if (GET_CODE (XVECEXP (pat, 0, i)) == USE
                && (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
                    == LABEL_REF))
              has_use_labelref = 1;

          if (! has_use_labelref)
            for (i = len - 1; i >= 0; i--)
              if (GET_CODE (XVECEXP (pat, 0, i)) == SET
                  && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
                  && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))
                return 1;
        }
      else if (GET_CODE (pat) == SET
               && SET_DEST (pat) == pc_rtx
               && computed_jump_p_1 (SET_SRC (pat)))
        return 1;
    }
  return 0;
}

/* Optimized loop of for_each_rtx, trying to avoid useless recursive
   calls.  Processes the subexpressions of EXP and passes them to F.  */
static int
for_each_rtx_1 (rtx exp, int n, rtx_function f, void *data)
{
  int result, i, j;
  const char *format = GET_RTX_FORMAT (GET_CODE (exp));
  rtx *x;

  for (; format[n] != '\0'; n++)
    {
      switch (format[n])
        {
        case 'e':
          /* Call F on X.  */
          x = &XEXP (exp, n);
          result = (*f) (x, data);
          if (result == -1)
            /* Do not traverse sub-expressions.  */
            continue;
          else if (result != 0)
            /* Stop the traversal.  */
            return result;

          if (*x == NULL_RTX)
            /* There are no sub-expressions.  */
            continue;

          i = non_rtx_starting_operands[GET_CODE (*x)];
          if (i >= 0)
            {
              result = for_each_rtx_1 (*x, i, f, data);
              if (result != 0)
                return result;
            }
          break;

        case 'V':
        case 'E':
          if (XVEC (exp, n) == 0)
            continue;
          for (j = 0; j < XVECLEN (exp, n); ++j)
            {
              /* Call F on X.  */
              x = &XVECEXP (exp, n, j);
              result = (*f) (x, data);
              if (result == -1)
                /* Do not traverse sub-expressions.  */
                continue;
              else if (result != 0)
                /* Stop the traversal.  */
                return result;

              if (*x == NULL_RTX)
                /* There are no sub-expressions.  */
                continue;

              i = non_rtx_starting_operands[GET_CODE (*x)];
              if (i >= 0)
                {
                  result = for_each_rtx_1 (*x, i, f, data);
                  if (result != 0)
                    return result;
                }
            }
          break;

        default:
          /* Nothing to do.  */
          break;
        }
    }

  return 0;
}

/* Traverse X via depth-first search, calling F for each
   sub-expression (including X itself).  F is also passed the DATA.
   If F returns -1, do not traverse sub-expressions, but continue
   traversing the rest of the tree.  If F ever returns any other
   nonzero value, stop the traversal, and return the value returned
   by F.  Otherwise, return 0.  This function does not traverse inside
   tree structure that contains RTX_EXPRs, or into sub-expressions
   whose format code is `0' since it is not known whether or not those
   codes are actually RTL.

   This routine is very general, and could (should?) be used to
   implement many of the other routines in this file.  */

int
for_each_rtx (rtx *x, rtx_function f, void *data)
{
  int result;
  int i;

  /* Call F on X.  */
  result = (*f) (x, data);
  if (result == -1)
    /* Do not traverse sub-expressions.  */
    return 0;
  else if (result != 0)
    /* Stop the traversal.  */
    return result;

  if (*x == NULL_RTX)
    /* There are no sub-expressions.  */
    return 0;

  i = non_rtx_starting_operands[GET_CODE (*x)];
  if (i < 0)
    return 0;

  return for_each_rtx_1 (*x, i, f, data);
}

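/* Usage sketch: a complete callback that counts the MEMs contained in
   a pattern (PAT is hypothetical).  Returning 0 continues the walk:

     static int
     count_mems_1 (rtx *x, void *data)
     {
       if (*x && MEM_P (*x))
         ++*(int *) data;
       return 0;
     }

     int n_mems = 0;
     for_each_rtx (&pat, count_mems_1, &n_mems);

   Returning -1 from the callback would skip sub-expressions instead.  */
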
/* Searches X for any reference to REGNO, returning the rtx of the
   reference found if any.  Otherwise, returns NULL_RTX.  */

rtx
regno_use_in (unsigned int regno, rtx x)
{
  const char *fmt;
  int i, j;
  rtx tem;

  if (REG_P (x) && REGNO (x) == regno)
    return x;

  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          if ((tem = regno_use_in (regno, XEXP (x, i))))
            return tem;
        }
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if ((tem = regno_use_in (regno, XVECEXP (x, i, j))))
            return tem;
    }

  return NULL_RTX;
}

/* Return a value indicating whether OP, an operand of a commutative
   operation, is preferred as the first or second operand.  The higher
   the value, the stronger the preference for being the first operand.
   Negative values indicate a preference for being the second operand,
   positive values a preference for being the first.  */

int
commutative_operand_precedence (rtx op)
{
  enum rtx_code code = GET_CODE (op);

  /* Constants always come second.  Prefer "nice" constants.  */
  if (code == CONST_INT)
    return -7;
  if (code == CONST_DOUBLE)
    return -6;
  op = avoid_constant_pool_reference (op);
  code = GET_CODE (op);

  switch (GET_RTX_CLASS (code))
    {
    case RTX_CONST_OBJ:
      if (code == CONST_INT)
        return -5;
      if (code == CONST_DOUBLE)
        return -4;
      return -3;

    case RTX_EXTRA:
      /* SUBREGs of objects should come second.  */
      if (code == SUBREG && OBJECT_P (SUBREG_REG (op)))
        return -2;

      if (!CONSTANT_P (op))
        return 0;
      else
        /* As for RTX_CONST_OBJ.  */
        return -3;

    case RTX_OBJ:
      /* Complex expressions should be the first, so decrease priority
         of objects.  */
      return -1;

    case RTX_COMM_ARITH:
      /* Prefer operands that are themselves commutative to be first.
         This helps to make things linear.  In particular,
         (and (and (reg) (reg)) (not (reg))) is canonical.  */
      return 4;

    case RTX_BIN_ARITH:
      /* If only one operand is a binary expression, it will be the first
         operand.  In particular,  (plus (minus (reg) (reg)) (neg (reg)))
         is canonical, although it will usually be further simplified.  */
      return 2;

    case RTX_UNARY:
      /* Then prefer NEG and NOT.  */
      if (code == NEG || code == NOT)
        return 1;

    default:
      return 0;
    }
}

/* Return 1 iff it is necessary to swap the operands of a commutative
   operation in order to canonicalize the expression.  */

int
swap_commutative_operands_p (rtx x, rtx y)
{
  return (commutative_operand_precedence (x)
          < commutative_operand_precedence (y));
}

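/* Usage sketch (OP0 and OP1 are hypothetical operands of a commutative
   operation): callers canonicalize by swapping when this predicate
   holds,

     if (swap_commutative_operands_p (op0, op1))
       {
         rtx tem = op0;
         op0 = op1;
         op1 = tem;
       }

   so that, e.g., a CONST_INT always ends up as the second operand.  */
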
/* Return 1 if X is an autoincrement side effect and the register is
   not the stack pointer.  */
int
auto_inc_p (rtx x)
{
  switch (GET_CODE (x))
    {
    case PRE_INC:
    case POST_INC:
    case PRE_DEC:
    case POST_DEC:
    case PRE_MODIFY:
    case POST_MODIFY:
      /* There are no REG_INC notes for SP.  */
      if (XEXP (x, 0) != stack_pointer_rtx)
        return 1;
    default:
      break;
    }
  return 0;
}

/* Return 1 if the sequence of instructions beginning with FROM and up
   to and including TO is safe to move.  If NEW_TO is non-NULL, and
   the sequence is not already safe to move, but can be easily
   extended to a sequence which is safe, then NEW_TO will point to the
   end of the extended sequence.

   For now, this function only checks that the region contains whole
   exception regions, but it could be extended to check additional
   conditions as well.  */

int
insns_safe_to_move_p (rtx from, rtx to, rtx *new_to)
{
  int eh_region_count = 0;
  int past_to_p = 0;
  rtx r = from;

  /* By default, assume the end of the region will be what was
     suggested.  */
  if (new_to)
    *new_to = to;

  while (r)
    {
      if (NOTE_P (r))
        {
          switch (NOTE_LINE_NUMBER (r))
            {
            case NOTE_INSN_EH_REGION_BEG:
              ++eh_region_count;
              break;

            case NOTE_INSN_EH_REGION_END:
              if (eh_region_count == 0)
                /* This sequence of instructions contains the end of
                   an exception region, but not the beginning.  Moving
                   it will cause chaos.  */
                return 0;

              --eh_region_count;
              break;

            default:
              break;
            }
        }
      else if (past_to_p)
        /* If we've passed TO, and we see a non-note instruction, we
           can't extend the sequence to a movable sequence.  */
        return 0;

      if (r == to)
        {
          if (!new_to)
            /* It's OK to move the sequence if there were matched sets of
               exception region notes.  */
            return eh_region_count == 0;

          past_to_p = 1;
        }

      /* It's OK to move the sequence if there were matched sets of
         exception region notes.  */
      if (past_to_p && eh_region_count == 0)
        {
          *new_to = r;
          return 1;
        }

      /* Go to the next instruction.  */
      r = NEXT_INSN (r);
    }

  return 0;
}

/* Return nonzero if IN contains a piece of rtl that has the address LOC.  */
int
loc_mentioned_in_p (rtx *loc, rtx in)
{
  enum rtx_code code = GET_CODE (in);
  const char *fmt = GET_RTX_FORMAT (code);
  int i, j;

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (loc == &in->u.fld[i].rt_rtx)
        return 1;
      if (fmt[i] == 'e')
        {
          if (loc_mentioned_in_p (loc, XEXP (in, i)))
            return 1;
        }
      else if (fmt[i] == 'E')
        for (j = XVECLEN (in, i) - 1; j >= 0; j--)
          if (loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
            return 1;
    }
  return 0;
}

/* Helper function for subreg_lsb.  Given a subreg's OUTER_MODE, INNER_MODE,
   and SUBREG_BYTE, return the bit offset where the subreg begins
   (counting from the least significant bit of the operand).  */

unsigned int
subreg_lsb_1 (enum machine_mode outer_mode,
              enum machine_mode inner_mode,
              unsigned int subreg_byte)
{
  unsigned int bitpos;
  unsigned int byte;
  unsigned int word;

  /* A paradoxical subreg begins at bit position 0.  */
  if (GET_MODE_BITSIZE (outer_mode) > GET_MODE_BITSIZE (inner_mode))
    return 0;

  if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
    /* If the subreg crosses a word boundary ensure that
       it also begins and ends on a word boundary.  */
    gcc_assert (!((subreg_byte % UNITS_PER_WORD
                  + GET_MODE_SIZE (outer_mode)) > UNITS_PER_WORD
                  && (subreg_byte % UNITS_PER_WORD
                      || GET_MODE_SIZE (outer_mode) % UNITS_PER_WORD)));

  if (WORDS_BIG_ENDIAN)
    word = (GET_MODE_SIZE (inner_mode)
            - (subreg_byte + GET_MODE_SIZE (outer_mode))) / UNITS_PER_WORD;
  else
    word = subreg_byte / UNITS_PER_WORD;
  bitpos = word * BITS_PER_WORD;

  if (BYTES_BIG_ENDIAN)
    byte = (GET_MODE_SIZE (inner_mode)
            - (subreg_byte + GET_MODE_SIZE (outer_mode))) % UNITS_PER_WORD;
  else
    byte = subreg_byte % UNITS_PER_WORD;
  bitpos += byte * BITS_PER_UNIT;

  return bitpos;
}

/* Given a subreg X, return the bit offset where the subreg begins
   (counting from the least significant bit of the reg).  */

unsigned int
subreg_lsb (rtx x)
{
  return subreg_lsb_1 (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
                       SUBREG_BYTE (x));
}

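/* Worked example (assuming, for illustration, a little-endian target
   with UNITS_PER_WORD == 4): for (subreg:QI (reg:SI) 2) the SUBREG_BYTE
   is 2, so word = 2 / 4 = 0 and byte = 2 % 4 = 2, giving

     subreg_lsb (x) == 0 * BITS_PER_WORD + 2 * BITS_PER_UNIT == 16,

   i.e. the QImode subreg starts at bit 16 of the SImode register.  */
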
/* This function returns the regno offset of a subreg expression.
   xregno - A regno of an inner hard subreg_reg (or what will become one).
   xmode  - The mode of xregno.
   offset - The byte offset.
   ymode  - The mode of a top level SUBREG (or what may become one).
   RETURN - The regno offset which would be used.  */
unsigned int
subreg_regno_offset (unsigned int xregno, enum machine_mode xmode,
                     unsigned int offset, enum machine_mode ymode)
{
  int nregs_xmode, nregs_ymode, nregs_xmode_unit_int;
  int mode_multiple, nregs_multiple;
  int y_offset;
  enum machine_mode xmode_unit, xmode_unit_int;

  gcc_assert (xregno < FIRST_PSEUDO_REGISTER);

  if (GET_MODE_INNER (xmode) == VOIDmode)
    xmode_unit = xmode;
  else
    xmode_unit = GET_MODE_INNER (xmode);

  if (FLOAT_MODE_P (xmode_unit))
    {
      xmode_unit_int = int_mode_for_mode (xmode_unit);
      if (xmode_unit_int == BLKmode)
        /* It's probably bad to be here; a port should have an integer mode
           that's the same size as anything of which it takes a SUBREG.  */
        xmode_unit_int = xmode_unit;
    }
  else
    xmode_unit_int = xmode_unit;

  nregs_xmode_unit_int = hard_regno_nregs[xregno][xmode_unit_int];

  /* Adjust nregs_xmode to allow for 'holes'.  */
  if (nregs_xmode_unit_int != hard_regno_nregs[xregno][xmode_unit])
    nregs_xmode = nregs_xmode_unit_int * GET_MODE_NUNITS (xmode);
  else
    nregs_xmode = hard_regno_nregs[xregno][xmode];

  nregs_ymode = hard_regno_nregs[xregno][ymode];

  /* If this is a big endian paradoxical subreg, which uses more actual
     hard registers than the original register, we must return a negative
     offset so that we find the proper highpart of the register.  */
  if (offset == 0
      && nregs_ymode > nregs_xmode
      && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
          ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
    return nregs_xmode - nregs_ymode;

  if (offset == 0 || nregs_xmode == nregs_ymode)
    return 0;

  /* Size of ymode must not be greater than the size of xmode.  */
  mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
  gcc_assert (mode_multiple != 0);

  y_offset = offset / GET_MODE_SIZE (ymode);
  nregs_multiple = nregs_xmode / nregs_ymode;
  return (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
}

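/* Worked example (assuming, for illustration, a 32-bit little-endian
   target where DImode occupies two hard registers): for
   (subreg:SI (reg:DI 0) 4) we get mode_multiple = 8 / 4 = 2,
   y_offset = 4 / 4 = 1 and nregs_multiple = 2 / 1 = 2, so

     subreg_regno_offset (0, DImode, 4, SImode) == 1,

   i.e. the subreg refers to hard register 1, the high word.  */
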
/* This function returns true when the offset is representable via
   subreg_offset in the given regno.
   xregno - A regno of an inner hard subreg_reg (or what will become one).
   xmode  - The mode of xregno.
   offset - The byte offset.
   ymode  - The mode of a top level SUBREG (or what may become one).
   RETURN - Whether the offset is representable.  */
bool
subreg_offset_representable_p (unsigned int xregno, enum machine_mode xmode,
                               unsigned int offset, enum machine_mode ymode)
{
  int nregs_xmode, nregs_ymode, nregs_xmode_unit, nregs_xmode_unit_int;
  int mode_multiple, nregs_multiple;
  int y_offset;
  enum machine_mode xmode_unit, xmode_unit_int;

  gcc_assert (xregno < FIRST_PSEUDO_REGISTER);

  if (GET_MODE_INNER (xmode) == VOIDmode)
    xmode_unit = xmode;
  else
    xmode_unit = GET_MODE_INNER (xmode);

  if (FLOAT_MODE_P (xmode_unit))
    {
      xmode_unit_int = int_mode_for_mode (xmode_unit);
      if (xmode_unit_int == BLKmode)
        /* It's probably bad to be here; a port should have an integer mode
           that's the same size as anything of which it takes a SUBREG.  */
        xmode_unit_int = xmode_unit;
    }
  else
    xmode_unit_int = xmode_unit;

  nregs_xmode_unit = hard_regno_nregs[xregno][xmode_unit];
  nregs_xmode_unit_int = hard_regno_nregs[xregno][xmode_unit_int];

  /* If there are holes in a non-scalar mode in registers, we expect
     that it is made up of its units concatenated together.  */
  if (nregs_xmode_unit != nregs_xmode_unit_int)
    {
      gcc_assert (nregs_xmode_unit * GET_MODE_NUNITS (xmode)
                  == hard_regno_nregs[xregno][xmode]);

      /* You can only ask for a SUBREG of a value with holes in the middle
         if you don't cross the holes.  (Such a SUBREG should be done by
         picking a different register class, or doing it in memory if
         necessary.)  An example of a value with holes is XCmode on 32-bit
         x86 with -m128bit-long-double; it's represented in 6 32-bit registers,
         3 for each part, but in memory it's two 128-bit parts.
         Padding is assumed to be at the end (not necessarily the 'high part')
         of each unit.  */
      if (nregs_xmode_unit != nregs_xmode_unit_int
          && (offset / GET_MODE_SIZE (xmode_unit_int) + 1
              < GET_MODE_NUNITS (xmode))
          && (offset / GET_MODE_SIZE (xmode_unit_int)
              != ((offset + GET_MODE_SIZE (ymode) - 1)
                  / GET_MODE_SIZE (xmode_unit_int))))
        return false;

      nregs_xmode = nregs_xmode_unit_int * GET_MODE_NUNITS (xmode);
    }
  else
    nregs_xmode = hard_regno_nregs[xregno][xmode];

  nregs_ymode = hard_regno_nregs[xregno][ymode];

  /* Paradoxical subregs are otherwise valid.  */
  if (offset == 0
      && nregs_ymode > nregs_xmode
      && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
          ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
    return true;

  /* Lowpart subregs are otherwise valid.  */
  if (offset == subreg_lowpart_offset (ymode, xmode))
    return true;

  /* This should always pass, otherwise we don't know how to verify
     the constraint.  These conditions may be relaxed but
     subreg_regno_offset would need to be redesigned.  */
  gcc_assert ((GET_MODE_SIZE (xmode) % GET_MODE_SIZE (ymode)) == 0);
  gcc_assert ((nregs_xmode % nregs_ymode) == 0);

  /* The XMODE value can be seen as a vector of NREGS_XMODE
     values.  The subreg must represent a lowpart of given field.
     Compute what field it is.  */
  offset -= subreg_lowpart_offset (ymode,
                                   mode_for_size (GET_MODE_BITSIZE (xmode)
                                                  / nregs_xmode,
                                                  MODE_INT, 0));

  /* Size of ymode must not be greater than the size of xmode.  */
  mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
  gcc_assert (mode_multiple != 0);

  y_offset = offset / GET_MODE_SIZE (ymode);
  nregs_multiple = nregs_xmode / nregs_ymode;

  gcc_assert ((offset % GET_MODE_SIZE (ymode)) == 0);
  gcc_assert ((mode_multiple % nregs_multiple) == 0);

  return (!(y_offset % (mode_multiple / nregs_multiple)));
}

/* Return the final regno that a subreg expression refers to.  */
unsigned int
subreg_regno (rtx x)
{
  unsigned int ret;
  rtx subreg = SUBREG_REG (x);
  int regno = REGNO (subreg);

  ret = regno + subreg_regno_offset (regno,
                                     GET_MODE (subreg),
                                     SUBREG_BYTE (x),
                                     GET_MODE (x));
  return ret;
}

struct parms_set_data
{
  int nregs;
  HARD_REG_SET regs;
};

/* Helper function for noticing stores to parameter registers.  */
static void
parms_set (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
{
  struct parms_set_data *d = data;
  if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
      && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
    {
      CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
      d->nregs--;
    }
}

/* Look backward for first parameter to be loaded.
   Note that loads of all parameters will not necessarily be
   found if CSE has eliminated some of them (e.g., an argument
   to the outer function is passed down as a parameter).
   Do not skip BOUNDARY.  */
rtx
find_first_parameter_load (rtx call_insn, rtx boundary)
{
  struct parms_set_data parm;
  rtx p, before, first_set;

  /* Since different machines initialize their parameter registers
     in different orders, assume nothing.  Collect the set of all
     parameter registers.  */
  CLEAR_HARD_REG_SET (parm.regs);
  parm.nregs = 0;
  for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
    if (GET_CODE (XEXP (p, 0)) == USE
        && REG_P (XEXP (XEXP (p, 0), 0)))
      {
        gcc_assert (REGNO (XEXP (XEXP (p, 0), 0)) < FIRST_PSEUDO_REGISTER);

        /* We only care about registers which can hold function
           arguments.  */
        if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
          continue;

        SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
        parm.nregs++;
      }
  before = call_insn;
  first_set = call_insn;

  /* Search backward for the first set of a register in this set.  */
  while (parm.nregs && before != boundary)
    {
      before = PREV_INSN (before);

      /* It is possible that some loads got CSEed from one call to
         another.  Stop in that case.  */
      if (CALL_P (before))
        break;

      /* Our caller must either ensure that we will find all sets
         (in case code has not been optimized yet), or guard against
         possible labels by setting BOUNDARY to the preceding
         CODE_LABEL.  */
      if (LABEL_P (before))
        {
          gcc_assert (before == boundary);
          break;
        }

      if (INSN_P (before))
        {
          int nregs_old = parm.nregs;
          note_stores (PATTERN (before), parms_set, &parm);
          /* If we found something that did not set a parameter reg,
             we're done.  Do not keep going, as that might result
             in hoisting an insn before the setting of a pseudo
             that is used by the hoisted insn.  */
          if (nregs_old != parm.nregs)
            first_set = before;
          else
            break;
        }
    }
  return first_set;
}

/* Return true if we should avoid inserting code between INSN and the
   preceding call instruction.  */

bool
keep_with_call_p (rtx insn)
{
  rtx set;

  if (INSN_P (insn) && (set = single_set (insn)) != NULL)
    {
      if (REG_P (SET_DEST (set))
          && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
          && fixed_regs[REGNO (SET_DEST (set))]
          && general_operand (SET_SRC (set), VOIDmode))
        return true;
      if (REG_P (SET_SRC (set))
          && FUNCTION_VALUE_REGNO_P (REGNO (SET_SRC (set)))
          && REG_P (SET_DEST (set))
          && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
        return true;
      /* There may be a stack pop just after the call and before the store
         of the return register.  Search for the actual store when deciding
         if we can break or not.  */
      if (SET_DEST (set) == stack_pointer_rtx)
        {
          rtx i2 = next_nonnote_insn (insn);
          if (i2 && keep_with_call_p (i2))
            return true;
        }
    }
  return false;
}

/* Return true if LABEL is a target of JUMP_INSN.  This applies only
   to non-complex jumps.  That is, direct unconditional, conditional,
   and tablejumps, but not computed jumps or returns.  It also does
   not apply to the fallthru case of a conditional jump.  */

bool
label_is_jump_target_p (rtx label, rtx jump_insn)
{
  rtx tmp = JUMP_LABEL (jump_insn);

  if (label == tmp)
    return true;

  if (tablejump_p (jump_insn, NULL, &tmp))
    {
      rtvec vec = XVEC (PATTERN (tmp),
                        GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC);
      int i, veclen = GET_NUM_ELEM (vec);

      for (i = 0; i < veclen; ++i)
        if (XEXP (RTVEC_ELT (vec, i), 0) == label)
          return true;
    }

  return false;
}

/* Return an estimate of the cost of computing rtx X.
   One use is in cse, to decide which expression to keep in the hash table.
   Another is in rtl generation, to pick the cheapest way to multiply.
   Other uses like the latter are expected in the future.  */

int
rtx_cost (rtx x, enum rtx_code outer_code ATTRIBUTE_UNUSED)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;
  int total;

  if (x == 0)
    return 0;

  /* Compute the default costs of certain things.
     Note that targetm.rtx_costs can override the defaults.  */

  code = GET_CODE (x);
  switch (code)
    {
    case MULT:
      total = COSTS_N_INSNS (5);
      break;
    case DIV:
    case UDIV:
    case MOD:
    case UMOD:
      total = COSTS_N_INSNS (7);
      break;
    case USE:
      /* Used in loop.c and combine.c as a marker.  */
      total = 0;
      break;
    default:
      total = COSTS_N_INSNS (1);
    }

  switch (code)
    {
    case REG:
      return 0;

    case SUBREG:
      total = 0;
      /* If we can't tie these modes, make this expensive.  The larger
         the mode, the more expensive it is.  */
      if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
        return COSTS_N_INSNS (2
                              + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
      break;

    default:
      if (targetm.rtx_costs (x, code, outer_code, &total))
        return total;
      break;
    }

  /* Sum the costs of the sub-rtx's, plus cost of this operation,
     which is already in total.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      total += rtx_cost (XEXP (x, i), code);
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
        total += rtx_cost (XVECEXP (x, i, j), code);

  return total;
}

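/* Usage sketch (SET and NEW_SRC are hypothetical): comparing the costs
   of two equivalent sources is a common pattern, e.g.

     if (rtx_cost (new_src, SET) < rtx_cost (SET_SRC (set), SET))
       ... prefer NEW_SRC ...

   The OUTER_CODE argument lets the target cost an operand differently
   depending on the context in which it appears.  */
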
/* Return the cost of address expression X.
   Expect that X is a properly formed address reference.  */

int
address_cost (rtx x, enum machine_mode mode)
{
  /* We may be asked for the cost of various unusual addresses, such as the
     operands of a push instruction.  It is not worthwhile to complicate
     the writing of the target hook for such cases.  */

  if (!memory_address_p (mode, x))
    return 1000;

  return targetm.address_cost (x);
}

/* If the target doesn't override, compute the cost as with arithmetic.  */

int
default_address_cost (rtx x)
{
  return rtx_cost (x, MEM);
}


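/* Under default_address_cost, an address such as (plus:SI (reg:SI 1)
   (const_int 4)) is priced exactly like the corresponding arithmetic;
   targets on which such forms are effectively free are expected to
   provide their own targetm.address_cost hook instead.  */
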
unsigned HOST_WIDE_INT
nonzero_bits (rtx x, enum machine_mode mode)
{
  return cached_nonzero_bits (x, mode, NULL_RTX, VOIDmode, 0);
}

unsigned int
num_sign_bit_copies (rtx x, enum machine_mode mode)
{
  return cached_num_sign_bit_copies (x, mode, NULL_RTX, VOIDmode, 0);
}

/* The function cached_nonzero_bits is a wrapper around nonzero_bits1.
   It avoids exponential behavior in nonzero_bits1 when X has
   identical subexpressions on the first or the second level.  */

static unsigned HOST_WIDE_INT
cached_nonzero_bits (rtx x, enum machine_mode mode, rtx known_x,
                     enum machine_mode known_mode,
                     unsigned HOST_WIDE_INT known_ret)
{
  if (x == known_x && mode == known_mode)
    return known_ret;

  /* Try to find identical subexpressions.  If found call
     nonzero_bits1 on X with the subexpressions as KNOWN_X and the
     precomputed value for the subexpression as KNOWN_RET.  */

  if (ARITHMETIC_P (x))
    {
      rtx x0 = XEXP (x, 0);
      rtx x1 = XEXP (x, 1);

      /* Check the first level.  */
      if (x0 == x1)
        return nonzero_bits1 (x, mode, x0, mode,
                              cached_nonzero_bits (x0, mode, known_x,
                                                   known_mode, known_ret));

      /* Check the second level.  */
      if (ARITHMETIC_P (x0)
          && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
        return nonzero_bits1 (x, mode, x1, mode,
                              cached_nonzero_bits (x1, mode, known_x,
                                                   known_mode, known_ret));

      if (ARITHMETIC_P (x1)
          && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
        return nonzero_bits1 (x, mode, x0, mode,
                              cached_nonzero_bits (x0, mode, known_x,
                                                   known_mode, known_ret));
    }

  return nonzero_bits1 (x, mode, known_x, known_mode, known_ret);
}

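/* The sharing check in cached_nonzero_bits matters: in an expression
   such as (plus:SI (reg:SI 1) (reg:SI 1)) both operands are the same
   rtx, so without it nonzero_bits1 would analyze the register twice,
   and nesting such expressions N levels deep would cost on the order
   of 2**N visits.  The wrapper computes the shared operand once and
   threads the result through as KNOWN_X/KNOWN_RET.  */
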
/* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
   We don't let nonzero_bits recur into num_sign_bit_copies, because that
   is less useful.  We can't allow both, because that results in exponential
   run time recursion.  There is a nullstone testcase that triggered
   this.  This macro avoids accidental uses of num_sign_bit_copies.  */
#define cached_num_sign_bit_copies sorry_i_am_preventing_exponential_behavior

/* Given an expression, X, compute which bits in X can be nonzero.
   We don't care about bits outside of those defined in MODE.

   For most X this is simply GET_MODE_MASK (MODE), but if X is
   an arithmetic operation, we can do better.  */

static unsigned HOST_WIDE_INT
nonzero_bits1 (rtx x, enum machine_mode mode, rtx known_x,
               enum machine_mode known_mode,
               unsigned HOST_WIDE_INT known_ret)
{
  unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
  unsigned HOST_WIDE_INT inner_nz;
  enum rtx_code code;
  unsigned int mode_width = GET_MODE_BITSIZE (mode);

  /* For floating-point values, assume all bits are needed.  */
  if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode))
    return nonzero;

  /* If X is wider than MODE, use its mode instead.  */
  if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
    {
      mode = GET_MODE (x);
      nonzero = GET_MODE_MASK (mode);
      mode_width = GET_MODE_BITSIZE (mode);
    }

  if (mode_width > HOST_BITS_PER_WIDE_INT)
    /* Our only callers in this case look for single bit values.  So
       just return the mode mask.  Those tests will then be false.  */
    return nonzero;

#ifndef WORD_REGISTER_OPERATIONS
  /* If MODE is wider than X, but both are a single word for both the host
     and target machines, we can compute this from which bits of the
     object might be nonzero in its own mode, taking into account the fact
     that on many CISC machines, accessing an object in a wider mode
     causes the high-order bits to become undefined.  So they are
     not known to be zero.  */

  if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
      && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
      && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
    {
      nonzero &= cached_nonzero_bits (x, GET_MODE (x),
                                      known_x, known_mode, known_ret);
      nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x));
      return nonzero;
    }
#endif

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
      /* If pointers extend unsigned and this is a pointer in Pmode, say that
         all the bits above ptr_mode are known to be zero.  */
      if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
          && REG_POINTER (x))
        nonzero &= GET_MODE_MASK (ptr_mode);
#endif

      /* Include declared information about alignment of pointers.  */
      /* ??? We don't properly preserve REG_POINTER changes across
         pointer-to-integer casts, so we can't trust it except for
         things that we know must be pointers.  See execute/960116-1.c.  */
      if ((x == stack_pointer_rtx
           || x == frame_pointer_rtx
           || x == arg_pointer_rtx)
          && REGNO_POINTER_ALIGN (REGNO (x)))
        {
          unsigned HOST_WIDE_INT alignment
            = REGNO_POINTER_ALIGN (REGNO (x)) / BITS_PER_UNIT;

#ifdef PUSH_ROUNDING
          /* If PUSH_ROUNDING is defined, it is possible for the
             stack to be momentarily aligned only to that amount,
             so we pick the least alignment.  */
          if (x == stack_pointer_rtx && PUSH_ARGS)
            alignment = MIN ((unsigned HOST_WIDE_INT) PUSH_ROUNDING (1),
                             alignment);
#endif

          nonzero &= ~(alignment - 1);
        }

      {
        unsigned HOST_WIDE_INT nonzero_for_hook = nonzero;
        rtx new = rtl_hooks.reg_nonzero_bits (x, mode, known_x,
                                              known_mode, known_ret,
                                              &nonzero_for_hook);

        if (new)
          nonzero_for_hook &= cached_nonzero_bits (new, mode, known_x,
                                                   known_mode, known_ret);

        return nonzero_for_hook;
      }

    case CONST_INT:
#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
      /* If X is negative in MODE, sign-extend the value.  */
      if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
          && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
        return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
#endif

      return INTVAL (x);

    case MEM:
#ifdef LOAD_EXTEND_OP
      /* In many, if not most, RISC machines, reading a byte from memory
         zeros the rest of the register.  Noticing that fact saves a lot
         of extra zero-extends.  */
      if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
        nonzero &= GET_MODE_MASK (GET_MODE (x));
#endif
      break;

    case EQ:  case NE:
    case UNEQ:  case LTGT:
    case GT:  case GTU:  case UNGT:
    case LT:  case LTU:  case UNLT:
    case GE:  case GEU:  case UNGE:
    case LE:  case LEU:  case UNLE:
    case UNORDERED: case ORDERED:
      /* If this produces an integer result, we know which bits are set.
         Code here used to clear bits outside the mode of X, but that is
         now done above.  */
      /* Mind that MODE is the mode the caller wants to look at this
         operation in, and not the actual operation mode.  We can wind
         up with (subreg:DI (gt:V4HI x y)), and we don't have anything
         that describes the results of a vector compare.  */
      if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
          && mode_width <= HOST_BITS_PER_WIDE_INT)
        nonzero = STORE_FLAG_VALUE;
      break;

    case NEG:
#if 0
      /* Disabled to avoid exponential mutual recursion between nonzero_bits
         and num_sign_bit_copies.  */
      if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
          == GET_MODE_BITSIZE (GET_MODE (x)))
        nonzero = 1;
#endif

      if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
        nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x)));
      break;

    case ABS:
#if 0
      /* Disabled to avoid exponential mutual recursion between nonzero_bits
         and num_sign_bit_copies.  */
      if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
          == GET_MODE_BITSIZE (GET_MODE (x)))
        nonzero = 1;
#endif
      break;

    case TRUNCATE:
      nonzero &= (cached_nonzero_bits (XEXP (x, 0), mode,
                                       known_x, known_mode, known_ret)
                  & GET_MODE_MASK (mode));
      break;

    case ZERO_EXTEND:
      nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
                                      known_x, known_mode, known_ret);
      if (GET_MODE (XEXP (x, 0)) != VOIDmode)
        nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
      break;

    case SIGN_EXTEND:
      /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
         Otherwise, show that all the bits in the outer mode but not in the
         inner mode may be nonzero.  */
      inner_nz = cached_nonzero_bits (XEXP (x, 0), mode,
                                      known_x, known_mode, known_ret);
      if (GET_MODE (XEXP (x, 0)) != VOIDmode)
        {
          inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
          if (inner_nz
              & (((HOST_WIDE_INT) 1
                  << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
            inner_nz |= (GET_MODE_MASK (mode)
                         & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
        }

      nonzero &= inner_nz;
      break;

    case AND:
      nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
                                       known_x, known_mode, known_ret)
                 & cached_nonzero_bits (XEXP (x, 1), mode,
                                        known_x, known_mode, known_ret);
      break;

    case XOR:   case IOR:
    case UMIN:  case UMAX:  case SMIN:  case SMAX:
      {
        unsigned HOST_WIDE_INT nonzero0 =
          cached_nonzero_bits (XEXP (x, 0), mode,
                               known_x, known_mode, known_ret);

        /* Don't call nonzero_bits for the second time if it cannot change
           anything.  */
        if ((nonzero & nonzero0) != nonzero)
          nonzero &= nonzero0
                     | cached_nonzero_bits (XEXP (x, 1), mode,
                                            known_x, known_mode, known_ret);
      }
      break;

    case PLUS:  case MINUS:
    case MULT:
    case DIV:   case UDIV:
    case MOD:   case UMOD:
      /* We can apply the rules of arithmetic to compute the number of
         high- and low-order zero bits of these operations.  We start by
         computing the width (position of the highest-order nonzero bit)
         and the number of low-order zero bits for each value.  */
      {
        unsigned HOST_WIDE_INT nz0 =
          cached_nonzero_bits (XEXP (x, 0), mode,
                               known_x, known_mode, known_ret);
        unsigned HOST_WIDE_INT nz1 =
          cached_nonzero_bits (XEXP (x, 1), mode,
                               known_x, known_mode, known_ret);
        int sign_index = GET_MODE_BITSIZE (GET_MODE (x)) - 1;
        int width0 = floor_log2 (nz0) + 1;
        int width1 = floor_log2 (nz1) + 1;
        int low0 = floor_log2 (nz0 & -nz0);
        int low1 = floor_log2 (nz1 & -nz1);
        HOST_WIDE_INT op0_maybe_minusp
          = (nz0 & ((HOST_WIDE_INT) 1 << sign_index));
        HOST_WIDE_INT op1_maybe_minusp
          = (nz1 & ((HOST_WIDE_INT) 1 << sign_index));
        unsigned int result_width = mode_width;
        int result_low = 0;

        switch (code)
          {
          case PLUS:
            result_width = MAX (width0, width1) + 1;
            result_low = MIN (low0, low1);
            break;
          case MINUS:
            result_low = MIN (low0, low1);
            break;
          case MULT:
            result_width = width0 + width1;
            result_low = low0 + low1;
            break;
          case DIV:
            if (width1 == 0)
              break;
            if (! op0_maybe_minusp && ! op1_maybe_minusp)
              result_width = width0;
            break;
          case UDIV:
            if (width1 == 0)
              break;
            result_width = width0;
            break;
          case MOD:
            if (width1 == 0)
              break;
            if (! op0_maybe_minusp && ! op1_maybe_minusp)
              result_width = MIN (width0, width1);
            result_low = MIN (low0, low1);
            break;
          case UMOD:
            if (width1 == 0)
              break;
            result_width = MIN (width0, width1);
            result_low = MIN (low0, low1);
            break;
          default:
            gcc_unreachable ();
          }

        if (result_width < mode_width)
          nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;

        if (result_low > 0)
          nonzero &= ~(((HOST_WIDE_INT) 1 << result_low) - 1);

#ifdef POINTERS_EXTEND_UNSIGNED
        /* If pointers extend unsigned and this is an addition or subtraction
           to a pointer in Pmode, all the bits above ptr_mode are known to be
           zero.  */
        if (POINTERS_EXTEND_UNSIGNED > 0 && GET_MODE (x) == Pmode
            && (code == PLUS || code == MINUS)
            && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
          nonzero &= GET_MODE_MASK (ptr_mode);
#endif
      }
      break;

    case ZERO_EXTRACT:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
        nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
      break;

    case SUBREG:
      /* If this is a SUBREG formed for a promoted variable that has
         been zero-extended, we know that at least the high-order bits
         are zero, though others might be too.  */

      if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x) > 0)
        nonzero = GET_MODE_MASK (GET_MODE (x))
                  & cached_nonzero_bits (SUBREG_REG (x), GET_MODE (x),
                                         known_x, known_mode, known_ret);

      /* If the inner mode is a single word for both the host and target
         machines, we can compute this from which bits of the inner
         object might be nonzero.  */
      if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
          && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
              <= HOST_BITS_PER_WIDE_INT))
        {
          nonzero &= cached_nonzero_bits (SUBREG_REG (x), mode,
                                          known_x, known_mode, known_ret);

#if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP)
          /* If this is a typical RISC machine, we only have to worry
             about the way loads are extended.  */
          if ((LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
               ? (((nonzero
                    & (((unsigned HOST_WIDE_INT) 1
                        << (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - 1))))
                   != 0))
               : LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) != ZERO_EXTEND)
              || !MEM_P (SUBREG_REG (x)))
#endif
            {
              /* On many CISC machines, accessing an object in a wider mode
                 causes the high-order bits to become undefined.  So they are
                 not known to be zero.  */
              if (GET_MODE_SIZE (GET_MODE (x))
                  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
                nonzero |= (GET_MODE_MASK (GET_MODE (x))
                            & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
            }
        }
      break;

    case ASHIFTRT:
    case LSHIFTRT:
    case ASHIFT:
    case ROTATE:
      /* The nonzero bits are in two classes: any bits within MODE
         that aren't in GET_MODE (x) are always significant.  The rest of the
         nonzero bits are those that are significant in the operand of
         the shift when shifted the appropriate number of bits.  This
         shows that high-order bits are cleared by the right shift and
         low-order bits by left shifts.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) >= 0
          && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
        {
          enum machine_mode inner_mode = GET_MODE (x);
          unsigned int width = GET_MODE_BITSIZE (inner_mode);
          int count = INTVAL (XEXP (x, 1));
          unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
          unsigned HOST_WIDE_INT op_nonzero =
            cached_nonzero_bits (XEXP (x, 0), mode,
                                 known_x, known_mode, known_ret);
          unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
          unsigned HOST_WIDE_INT outer = 0;

          if (mode_width > width)
            outer = (op_nonzero & nonzero & ~mode_mask);

          if (code == LSHIFTRT)
            inner >>= count;
          else if (code == ASHIFTRT)
            {
              inner >>= count;

              /* If the sign bit may have been nonzero before the shift, we
                 need to mark all the places it could have been copied to
                 by the shift as possibly nonzero.  */
              if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
                inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
            }
          else if (code == ASHIFT)
            inner <<= count;
          else
            inner = ((inner << (count % width)
                      | (inner >> (width - (count % width)))) & mode_mask);

          nonzero &= (outer | inner);
        }
      break;

    case FFS:
    case POPCOUNT:
      /* This is at most the number of bits in the mode.  */
      nonzero = ((HOST_WIDE_INT) 2 << (floor_log2 (mode_width))) - 1;
      break;

    case CLZ:
      /* If CLZ has a known value at zero, then the nonzero bits are
         that value, plus the number of bits in the mode minus one.  */
      if (CLZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
        nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
      else
        nonzero = -1;
      break;

    case CTZ:
      /* If CTZ has a known value at zero, then the nonzero bits are
         that value, plus the number of bits in the mode minus one.  */
      if (CTZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
        nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
      else
        nonzero = -1;
      break;

    case PARITY:
      nonzero = 1;
      break;

    case IF_THEN_ELSE:
      {
        unsigned HOST_WIDE_INT nonzero_true =
          cached_nonzero_bits (XEXP (x, 1), mode,
                               known_x, known_mode, known_ret);

        /* Don't call nonzero_bits for the second time if it cannot change
           anything.  */
        if ((nonzero & nonzero_true) != nonzero)
          nonzero &= nonzero_true
                     | cached_nonzero_bits (XEXP (x, 2), mode,
                                            known_x, known_mode, known_ret);
      }
      break;

    default:
      break;
    }

  return nonzero;
}

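/* Two small illustrations of the propagation in nonzero_bits1,
   assuming nothing further is known about the register involved:
   nonzero_bits of (and:SI (reg:SI 1) (const_int 255)) is 255, since
   the AND case intersects the operands' masks, and for
   (ashift:SI (and:SI (reg:SI 1) (const_int 3)) (const_int 4)) the
   shift case slides the inner mask left, giving 0x30.  */
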
/* See the macro definition above.  */
#undef cached_num_sign_bit_copies


/* The function cached_num_sign_bit_copies is a wrapper around
   num_sign_bit_copies1.  It avoids exponential behavior in
   num_sign_bit_copies1 when X has identical subexpressions on the
   first or the second level.  */

static unsigned int
cached_num_sign_bit_copies (rtx x, enum machine_mode mode, rtx known_x,
                            enum machine_mode known_mode,
                            unsigned int known_ret)
{
  if (x == known_x && mode == known_mode)
    return known_ret;

  /* Try to find identical subexpressions.  If found call
     num_sign_bit_copies1 on X with the subexpressions as KNOWN_X and
     the precomputed value for the subexpression as KNOWN_RET.  */

  if (ARITHMETIC_P (x))
    {
      rtx x0 = XEXP (x, 0);
      rtx x1 = XEXP (x, 1);

      /* Check the first level.  */
      if (x0 == x1)
        return
          num_sign_bit_copies1 (x, mode, x0, mode,
                                cached_num_sign_bit_copies (x0, mode, known_x,
                                                            known_mode,
                                                            known_ret));

      /* Check the second level.  */
      if (ARITHMETIC_P (x0)
          && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
        return
          num_sign_bit_copies1 (x, mode, x1, mode,
                                cached_num_sign_bit_copies (x1, mode, known_x,
                                                            known_mode,
                                                            known_ret));

      if (ARITHMETIC_P (x1)
          && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
        return
          num_sign_bit_copies1 (x, mode, x0, mode,
                                cached_num_sign_bit_copies (x0, mode, known_x,
                                                            known_mode,
                                                            known_ret));
    }

  return num_sign_bit_copies1 (x, mode, known_x, known_mode, known_ret);
}

/* Return the number of bits at the high-order end of X that are known to
   be equal to the sign bit.  X will be used in mode MODE; if MODE is
   VOIDmode, X will be used in its own mode.  The returned value will always
   be between 1 and the number of bits in MODE.  */

static unsigned int
num_sign_bit_copies1 (rtx x, enum machine_mode mode, rtx known_x,
                      enum machine_mode known_mode,
                      unsigned int known_ret)
{
  enum rtx_code code = GET_CODE (x);
  unsigned int bitwidth = GET_MODE_BITSIZE (mode);
  int num0, num1, result;
  unsigned HOST_WIDE_INT nonzero;

  /* If we weren't given a mode, use the mode of X.  If the mode is still
     VOIDmode, we don't know anything.  Likewise if one of the modes is
     floating-point.  */

  if (mode == VOIDmode)
    mode = GET_MODE (x);

  if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x)))
    return 1;

  /* For a smaller object, just ignore the high bits.  */
  if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
    {
      num0 = cached_num_sign_bit_copies (x, GET_MODE (x),
                                         known_x, known_mode, known_ret);
      return MAX (1,
                  num0 - (int) (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth));
    }

  if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
    {
#ifndef WORD_REGISTER_OPERATIONS
      /* If this machine does not do all register operations on the entire
         register and MODE is wider than the mode of X, we can say nothing
         at all about the high-order bits.  */
      return 1;
#else
      /* Likewise on machines that do, if the mode of the object is smaller
         than a word and loads of that size don't sign extend, we can say
         nothing about the high order bits.  */
      if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
#ifdef LOAD_EXTEND_OP
          && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
#endif
          )
        return 1;
#endif
    }

  switch (code)
    {
    case REG:

#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
      /* If pointers extend signed and this is a pointer in Pmode, say that
         all the bits above ptr_mode are known to be sign bit copies.  */
      if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode
          && REG_POINTER (x))
        return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1;
#endif

      {
        unsigned int copies_for_hook = 1, copies = 1;
        rtx new = rtl_hooks.reg_num_sign_bit_copies (x, mode, known_x,
                                                     known_mode, known_ret,
                                                     &copies_for_hook);

        if (new)
          copies = cached_num_sign_bit_copies (new, mode, known_x,
                                               known_mode, known_ret);

        if (copies > 1 || copies_for_hook > 1)
          return MAX (copies, copies_for_hook);

        /* Else, use nonzero_bits to guess num_sign_bit_copies (see below).  */
      }
      break;

    case MEM:
#ifdef LOAD_EXTEND_OP
      /* Some RISC machines sign-extend all loads of smaller than a word.  */
      if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
        return MAX (1, ((int) bitwidth
                        - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1));
#endif
      break;

    case CONST_INT:
      /* If the constant is negative, take its 1's complement and remask.
         Then see how many zero bits we have.  */
      nonzero = INTVAL (x) & GET_MODE_MASK (mode);
      if (bitwidth <= HOST_BITS_PER_WIDE_INT
          && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
        nonzero = (~nonzero) & GET_MODE_MASK (mode);

      return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);

    case SUBREG:
      /* If this is a SUBREG for a promoted object that is sign-extended
         and we are looking at it in a wider mode, we know that at least the
         high-order bits are known to be sign bit copies.  */

      if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
        {
          num0 = cached_num_sign_bit_copies (SUBREG_REG (x), mode,
                                             known_x, known_mode, known_ret);
          return MAX ((int) bitwidth
                      - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1,
                      num0);
        }

      /* For a smaller object, just ignore the high bits.  */
      if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
        {
          num0 = cached_num_sign_bit_copies (SUBREG_REG (x), VOIDmode,
                                             known_x, known_mode, known_ret);
          return MAX (1, (num0
                          - (int) (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
                                   - bitwidth)));
        }

#ifdef WORD_REGISTER_OPERATIONS
#ifdef LOAD_EXTEND_OP
      /* For paradoxical SUBREGs on machines where all register operations
         affect the entire register, just look inside.  Note that we are
         passing MODE to the recursive call, so the number of sign bit copies
         will remain relative to that mode, not the inner mode.  */

      /* This works only if loads sign extend.  Otherwise, if we get a
         reload for the inner part, it may be loaded from the stack, and
         then we lose all sign bit copies that existed before the store
         to the stack.  */

      if ((GET_MODE_SIZE (GET_MODE (x))
           > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
          && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
          && MEM_P (SUBREG_REG (x)))
        return cached_num_sign_bit_copies (SUBREG_REG (x), mode,
                                           known_x, known_mode, known_ret);
#endif
#endif
      break;

    case SIGN_EXTRACT:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
        return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1)));
      break;

    case SIGN_EXTEND:
      return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
              + cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
                                            known_x, known_mode, known_ret));

    case TRUNCATE:
      /* For a smaller object, just ignore the high bits.  */
      num0 = cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
                                         known_x, known_mode, known_ret);
      return MAX (1, (num0 - (int) (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
                                    - bitwidth)));

    case NOT:
      return cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                         known_x, known_mode, known_ret);

    case ROTATE:       case ROTATERT:
      /* If we are rotating left by a number of bits less than the number
         of sign bit copies, we can just subtract that amount from the
         number.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) >= 0
          && INTVAL (XEXP (x, 1)) < (int) bitwidth)
        {
          num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                             known_x, known_mode, known_ret);
          return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
                                 : (int) bitwidth - INTVAL (XEXP (x, 1))));
        }
      break;

    case NEG:
      /* In general, this subtracts one sign bit copy.  But if the value
         is known to be positive, the number of sign bit copies is the
         same as that of the input.  Finally, if the input has just one bit
         that might be nonzero, all the bits are copies of the sign bit.  */
      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                         known_x, known_mode, known_ret);
      if (bitwidth > HOST_BITS_PER_WIDE_INT)
        return num0 > 1 ? num0 - 1 : 1;

      nonzero = nonzero_bits (XEXP (x, 0), mode);
      if (nonzero == 1)
        return bitwidth;

      if (num0 > 1
          && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
        num0--;

      return num0;

    case IOR:   case AND:   case XOR:
    case SMIN:  case SMAX:  case UMIN:  case UMAX:
      /* Logical operations will preserve the number of sign-bit copies.
         MIN and MAX operations always return one of the operands.  */
      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                         known_x, known_mode, known_ret);
      num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
                                         known_x, known_mode, known_ret);
      return MIN (num0, num1);

    case PLUS:  case MINUS:
      /* For addition and subtraction, we can have a 1-bit carry.  However,
         if we are subtracting 1 from a positive number, there will not
         be such a carry.  Furthermore, if the positive number is known to
         be 0 or 1, we know the result is either -1 or 0.  */

      if (code == PLUS && XEXP (x, 1) == constm1_rtx
          && bitwidth <= HOST_BITS_PER_WIDE_INT)
        {
          nonzero = nonzero_bits (XEXP (x, 0), mode);
          if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
            return (nonzero == 1 || nonzero == 0 ? bitwidth
                    : bitwidth - floor_log2 (nonzero) - 1);
        }

      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                         known_x, known_mode, known_ret);
      num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
                                         known_x, known_mode, known_ret);
      result = MAX (1, MIN (num0, num1) - 1);

#ifdef POINTERS_EXTEND_UNSIGNED
      /* If pointers extend signed and this is an addition or subtraction
         to a pointer in Pmode, all the bits above ptr_mode are known to be
         sign bit copies.  */
      if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
          && (code == PLUS || code == MINUS)
          && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
        result = MAX ((int) (GET_MODE_BITSIZE (Pmode)
                             - GET_MODE_BITSIZE (ptr_mode) + 1),
                      result);
#endif
      return result;

    case MULT:
      /* The number of bits of the product is the sum of the number of
         bits of both terms.  However, unless one of the terms is known
         to be positive, we must allow for an additional bit since negating
         a negative number can remove one sign bit copy.  */

      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                         known_x, known_mode, known_ret);
      num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
                                         known_x, known_mode, known_ret);

      result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
      if (result > 0
          && (bitwidth > HOST_BITS_PER_WIDE_INT
              || (((nonzero_bits (XEXP (x, 0), mode)
                    & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
                  && ((nonzero_bits (XEXP (x, 1), mode)
                       & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))))
        result--;

      return MAX (1, result);

    case UDIV:
      /* The result must be <= the first operand.  If the first operand
         has the high bit set, we know nothing about the number of sign
         bit copies.  */
      if (bitwidth > HOST_BITS_PER_WIDE_INT)
        return 1;
      else if ((nonzero_bits (XEXP (x, 0), mode)
                & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
        return 1;
      else
        return cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                           known_x, known_mode, known_ret);

    case UMOD:
      /* The result must be <= the second operand.  */
      return cached_num_sign_bit_copies (XEXP (x, 1), mode,
                                         known_x, known_mode, known_ret);

    case DIV:
      /* Similar to unsigned division, except that we have to worry about
         the case where the divisor is negative, in which case we have
         to add 1.  */
      result = cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                           known_x, known_mode, known_ret);
      if (result > 1
          && (bitwidth > HOST_BITS_PER_WIDE_INT
              || (nonzero_bits (XEXP (x, 1), mode)
                  & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
        result--;

      return result;

    case MOD:
      result = cached_num_sign_bit_copies (XEXP (x, 1), mode,
                                           known_x, known_mode, known_ret);
      if (result > 1
          && (bitwidth > HOST_BITS_PER_WIDE_INT
              || (nonzero_bits (XEXP (x, 1), mode)
                  & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
        result--;

      return result;

    case ASHIFTRT:
      /* Shifts by a constant add to the number of bits equal to the
         sign bit.  */
      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                         known_x, known_mode, known_ret);
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) > 0)
        num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1)));

      return num0;

    case ASHIFT:
      /* Left shifts destroy copies.  */
      if (GET_CODE (XEXP (x, 1)) != CONST_INT
          || INTVAL (XEXP (x, 1)) < 0
          || INTVAL (XEXP (x, 1)) >= (int) bitwidth)
        return 1;

      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                         known_x, known_mode, known_ret);
      return MAX (1, num0 - INTVAL (XEXP (x, 1)));

    case IF_THEN_ELSE:
      num0 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
                                         known_x, known_mode, known_ret);
      num1 = cached_num_sign_bit_copies (XEXP (x, 2), mode,
                                         known_x, known_mode, known_ret);
      return MIN (num0, num1);

    case EQ:  case NE:  case GE:  case GT:  case LE:  case LT:
    case UNEQ:  case LTGT:  case UNGE:  case UNGT:  case UNLE:  case UNLT:
    case GEU: case GTU: case LEU: case LTU:
    case UNORDERED: case ORDERED:
      /* If STORE_FLAG_VALUE is negative, take its 1's complement and
         remask.  Then see how many zero bits we have.  */
      nonzero = STORE_FLAG_VALUE;
      if (bitwidth <= HOST_BITS_PER_WIDE_INT
          && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
        nonzero = (~nonzero) & GET_MODE_MASK (mode);

      return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);

    default:
      break;
    }

  /* If we haven't been able to figure it out by one of the above rules,
     see if some of the high-order bits are known to be zero.  If so,
     count those bits and return one less than that amount.  If we can't
     safely compute the mask for this mode, return 1.  */

  bitwidth = GET_MODE_BITSIZE (mode);
  if (bitwidth > HOST_BITS_PER_WIDE_INT)
    return 1;

  nonzero = nonzero_bits (x, mode);
  return nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
         ? 1 : bitwidth - floor_log2 (nonzero) - 1;
}

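/* For instance, (const_int -4) examined in SImode leaves nonzero == 3
   after the complement-and-remask step, so the CONST_INT case reports
   32 - floor_log2 (3) - 1 == 30 copies, matching the 30 leading one
   bits of ...11111100; and (sign_extend:SI (reg:QI 1)) yields at
   least 32 - 8 + 1 == 25 copies via the SIGN_EXTEND case.  */
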
/* Calculate the rtx_cost of a single instruction.  A return value of
   zero indicates an instruction pattern without a known cost.  */

int
insn_rtx_cost (rtx pat)
{
  int i, cost;
  rtx set;

  /* Extract the single set rtx from the instruction pattern.
     We can't use single_set since we only have the pattern.  */
  if (GET_CODE (pat) == SET)
    set = pat;
  else if (GET_CODE (pat) == PARALLEL)
    {
      set = NULL_RTX;
      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx x = XVECEXP (pat, 0, i);
          if (GET_CODE (x) == SET)
            {
              if (set)
                return 0;
              set = x;
            }
        }
      if (!set)
        return 0;
    }
  else
    return 0;

  cost = rtx_cost (SET_SRC (set), SET);
  return cost > 0 ? cost : COSTS_N_INSNS (1);
}

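/* The COSTS_N_INSNS (1) floor in insn_rtx_cost means a pattern whose
   SET_SRC the target prices at zero still counts as one instruction;
   callers such as the if-conversion pass lean on this when judging
   whether a conditional sequence is cheaper than a branch.  */
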
/* Given an insn INSN and condition COND, return the condition in a
   canonical form to simplify testing by callers.  Specifically:

   (1) The code will always be a comparison operation (EQ, NE, GT, etc.).
   (2) Both operands will be machine operands; (cc0) will have been replaced.
   (3) If an operand is a constant, it will be the second operand.
   (4) (LE x const) will be replaced with (LT x <const+1>) and similarly
       for GE, GEU, and LEU.

   If the condition cannot be understood, or is an inequality floating-point
   comparison which needs to be reversed, 0 will be returned.

   If REVERSE is nonzero, then reverse the condition prior to canonicalizing
   it.

   If EARLIEST is nonzero, it is a pointer to a place where the earliest
   insn used in locating the condition was found.  If a replacement test
   of the condition is desired, it should be placed in front of that
   insn and we will be sure that the inputs are still valid.

   If WANT_REG is nonzero, we wish the condition to be relative to that
   register, if possible.  Therefore, do not canonicalize the condition
   further.  If ALLOW_CC_MODE is nonzero, allow the condition returned
   to be a compare to a CC mode register.

   If VALID_AT_INSN_P, the condition must be valid at both *EARLIEST
   and at INSN.  */

rtx
canonicalize_condition (rtx insn, rtx cond, int reverse, rtx *earliest,
                        rtx want_reg, int allow_cc_mode, int valid_at_insn_p)
{
  enum rtx_code code;
  rtx prev = insn;
  rtx set;
  rtx tem;
  rtx op0, op1;
  int reverse_code = 0;
  enum machine_mode mode;

  code = GET_CODE (cond);
  mode = GET_MODE (cond);
  op0 = XEXP (cond, 0);
  op1 = XEXP (cond, 1);

  if (reverse)
    code = reversed_comparison_code (cond, insn);
  if (code == UNKNOWN)
    return 0;

  if (earliest)
    *earliest = insn;

  /* If we are comparing a register with zero, see if the register is set
     in the previous insn to a COMPARE or a comparison operation.  Perform
     the same tests as a function of STORE_FLAG_VALUE as find_comparison_args
     in cse.c.  */

  while ((GET_RTX_CLASS (code) == RTX_COMPARE
          || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
         && op1 == CONST0_RTX (GET_MODE (op0))
         && op0 != want_reg)
    {
      /* Set nonzero when we find something of interest.  */
      rtx x = 0;

#ifdef HAVE_cc0
      /* If comparison with cc0, import actual comparison from compare
         insn.  */
      if (op0 == cc0_rtx)
        {
          if ((prev = prev_nonnote_insn (prev)) == 0
              || !NONJUMP_INSN_P (prev)
              || (set = single_set (prev)) == 0
              || SET_DEST (set) != cc0_rtx)
            return 0;

          op0 = SET_SRC (set);
          op1 = CONST0_RTX (GET_MODE (op0));
          if (earliest)
            *earliest = prev;
        }
#endif

      /* If this is a COMPARE, pick up the two things being compared.  */
      if (GET_CODE (op0) == COMPARE)
        {
          op1 = XEXP (op0, 1);
          op0 = XEXP (op0, 0);
          continue;
        }
      else if (!REG_P (op0))
        break;

      /* Go back to the previous insn.  Stop if it is not an INSN.  We also
         stop if it isn't a single set or if it has a REG_INC note because
         we don't want to bother dealing with it.  */

      if ((prev = prev_nonnote_insn (prev)) == 0
          || !NONJUMP_INSN_P (prev)
          || FIND_REG_INC_NOTE (prev, NULL_RTX))
        break;

      set = set_of (op0, prev);

      if (set
          && (GET_CODE (set) != SET
              || !rtx_equal_p (SET_DEST (set), op0)))
        break;

      /* If this is setting OP0, get what it sets it to if it looks
         relevant.  */
      if (set)
        {
          enum machine_mode inner_mode = GET_MODE (SET_DEST (set));
#ifdef FLOAT_STORE_FLAG_VALUE
          REAL_VALUE_TYPE fsfv;
#endif

          /* ??? We may not combine comparisons done in a CCmode with
             comparisons not done in a CCmode.  This is to aid targets
             like Alpha that have an IEEE compliant EQ instruction, and
             a non-IEEE compliant BEQ instruction.  The use of CCmode is
             actually artificial, simply to prevent the combination, but
             should not affect other platforms.

             However, we must allow VOIDmode comparisons to match either
             CCmode or non-CCmode comparison, because some ports have
             modeless comparisons inside branch patterns.

             ??? This mode check should perhaps look more like the mode check
             in simplify_comparison in combine.  */

          if ((GET_CODE (SET_SRC (set)) == COMPARE
               || (((code == NE
                     || (code == LT
                         && GET_MODE_CLASS (inner_mode) == MODE_INT
                         && (GET_MODE_BITSIZE (inner_mode)
                             <= HOST_BITS_PER_WIDE_INT)
                         && (STORE_FLAG_VALUE
                             & ((HOST_WIDE_INT) 1
                                << (GET_MODE_BITSIZE (inner_mode) - 1))))
#ifdef FLOAT_STORE_FLAG_VALUE
                     || (code == LT
                         && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
                         && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
                             REAL_VALUE_NEGATIVE (fsfv)))
#endif
                     ))
                   && COMPARISON_P (SET_SRC (set))))
              && (((GET_MODE_CLASS (mode) == MODE_CC)
                   == (GET_MODE_CLASS (inner_mode) == MODE_CC))
                  || mode == VOIDmode || inner_mode == VOIDmode))
            x = SET_SRC (set);
          else if (((code == EQ
                     || (code == GE
                         && (GET_MODE_BITSIZE (inner_mode)
                             <= HOST_BITS_PER_WIDE_INT)
                         && GET_MODE_CLASS (inner_mode) == MODE_INT
                         && (STORE_FLAG_VALUE
                             & ((HOST_WIDE_INT) 1
                                << (GET_MODE_BITSIZE (inner_mode) - 1))))
#ifdef FLOAT_STORE_FLAG_VALUE
                     || (code == GE
                         && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
                         && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
                             REAL_VALUE_NEGATIVE (fsfv)))
#endif
                     ))
                   && COMPARISON_P (SET_SRC (set))
                   && (((GET_MODE_CLASS (mode) == MODE_CC)
                        == (GET_MODE_CLASS (inner_mode) == MODE_CC))
                       || mode == VOIDmode || inner_mode == VOIDmode))
            {
              reverse_code = 1;
              x = SET_SRC (set);
            }
          else
            break;
        }

      else if (reg_set_p (op0, prev))
        /* If this sets OP0, but not directly, we have to give up.  */
        break;

      if (x)
        {
          /* If the caller is expecting the condition to be valid at INSN,
             make sure X doesn't change before INSN.  */
          if (valid_at_insn_p)
            if (modified_in_p (x, prev) || modified_between_p (x, prev, insn))
              break;
          if (COMPARISON_P (x))
            code = GET_CODE (x);
          if (reverse_code)
            {
              code = reversed_comparison_code (x, prev);
              if (code == UNKNOWN)
                return 0;
              reverse_code = 0;
            }

          op0 = XEXP (x, 0), op1 = XEXP (x, 1);
          if (earliest)
            *earliest = prev;
        }
    }

  /* If constant is first, put it last.  */
  if (CONSTANT_P (op0))
    code = swap_condition (code), tem = op0, op0 = op1, op1 = tem;

  /* If OP0 is the result of a comparison, we weren't able to find what
     was really being compared, so fail.  */
  if (!allow_cc_mode
      && GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC)
    return 0;

  /* Canonicalize any ordered comparison with integers involving equality
     if we can do computations in the relevant mode and we do not
     overflow.  */

  if (GET_MODE_CLASS (GET_MODE (op0)) != MODE_CC
      && GET_CODE (op1) == CONST_INT
      && GET_MODE (op0) != VOIDmode
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      HOST_WIDE_INT const_val = INTVAL (op1);
      unsigned HOST_WIDE_INT uconst_val = const_val;
      unsigned HOST_WIDE_INT max_val
        = (unsigned HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (op0));

      switch (code)
        {
        case LE:
          if ((unsigned HOST_WIDE_INT) const_val != max_val >> 1)
            code = LT, op1 = gen_int_mode (const_val + 1, GET_MODE (op0));
          break;

        /* When cross-compiling, const_val might be sign-extended from
           BITS_PER_WORD to HOST_BITS_PER_WIDE_INT.  */
        case GE:
          if ((HOST_WIDE_INT) (const_val & max_val)
              != (((HOST_WIDE_INT) 1
                   << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
            code = GT, op1 = gen_int_mode (const_val - 1, GET_MODE (op0));
          break;

        case LEU:
          if (uconst_val < max_val)
            code = LTU, op1 = gen_int_mode (uconst_val + 1, GET_MODE (op0));
          break;

        case GEU:
          if (uconst_val != 0)
            code = GTU, op1 = gen_int_mode (uconst_val - 1, GET_MODE (op0));
          break;

        default:
          break;
        }
    }

  /* Never return CC0; return zero instead.  */
  if (CC0_P (op0))
    return 0;

  return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
}

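/* Rule (4) of canonicalize_condition in action (illustrative):
   (le:SI (reg:SI 1) (const_int 4)) canonicalizes to
   (lt:SI (reg:SI 1) (const_int 5)), and (geu:SI (reg:SI 1)
   (const_int 1)) to (gtu:SI (reg:SI 1) (const_int 0)), in each case
   only when the new constant cannot overflow, as guarded by the
   switch above.  */
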
/* Given a jump insn JUMP, return the condition that will cause it to branch
   to its JUMP_LABEL.  If the condition cannot be understood, or is an
   inequality floating-point comparison which needs to be reversed, 0 will
   be returned.

   If EARLIEST is nonzero, it is a pointer to a place where the earliest
   insn used in locating the condition was found.  If a replacement test
   of the condition is desired, it should be placed in front of that
   insn and we will be sure that the inputs are still valid.  If EARLIEST
   is null, the returned condition will be valid at INSN.

   If ALLOW_CC_MODE is nonzero, allow the condition returned to be a
   compare to a CC mode register.

   VALID_AT_INSN_P is the same as for canonicalize_condition.  */

rtx
get_condition (rtx jump, rtx *earliest, int allow_cc_mode, int valid_at_insn_p)
{
  rtx cond;
  int reverse;
  rtx set;

  /* If this is not a standard conditional jump, we can't parse it.  */
  if (!JUMP_P (jump)
      || ! any_condjump_p (jump))
    return 0;
  set = pc_set (jump);

  cond = XEXP (SET_SRC (set), 0);

  /* If this branches to JUMP_LABEL when the condition is false, reverse
     the condition.  */
  reverse
    = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
      && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump);

  return canonicalize_condition (jump, cond, reverse, earliest, NULL_RTX,
                                 allow_cc_mode, valid_at_insn_p);
}

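/* Concretely, for a jump whose pattern is
   (set (pc) (if_then_else (eq (reg:SI 1) (const_int 0))
                           (label_ref 23) (pc)))
   get_condition returns the EQ comparison; had the arms been swapped
   so the label_ref sat in the else position, REVERSE would be set and
   the canonicalized NE form would be returned instead.  */
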
/* Initialize non_rtx_starting_operands, which is used to speed up
   for_each_rtx.  */
void
init_rtlanal (void)
{
  int i;
  for (i = 0; i < NUM_RTX_CODE; i++)
    {
      const char *format = GET_RTX_FORMAT (i);
      const char *first = strpbrk (format, "eEV");
      non_rtx_starting_operands[i] = first ? first - format : -1;
    }
}
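
/* For example, PLUS has format "ee", so its entry becomes 0, while
   CONST_INT has format "w" and gets -1; for_each_rtx uses the entry
   to skip directly to the first operand that can hold a nested rtx or
   rtvec.  */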
