OpenCores
URL https://opencores.org/ocsvn/openrisc/openrisc/trunk

Subversion Repositories openrisc

[/] [openrisc/] [trunk/] [gnu-dev/] [or1k-gcc/] [gcc/] [rtlanal.c] - Blame information for rev 749

Go to most recent revision | Details | Compare with Previous | View Log

Line No. Rev Author Line
1 684 jeremybenn
/* Analyze RTL for GNU compiler.
2
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
4
   2011 Free Software Foundation, Inc.
5
 
6
This file is part of GCC.
7
 
8
GCC is free software; you can redistribute it and/or modify it under
9
the terms of the GNU General Public License as published by the Free
10
Software Foundation; either version 3, or (at your option) any later
11
version.
12
 
13
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14
WARRANTY; without even the implied warranty of MERCHANTABILITY or
15
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
16
for more details.
17
 
18
You should have received a copy of the GNU General Public License
19
along with GCC; see the file COPYING3.  If not see
20
<http://www.gnu.org/licenses/>.  */
21
 
22
 
23
#include "config.h"
24
#include "system.h"
25
#include "coretypes.h"
26
#include "tm.h"
27
#include "diagnostic-core.h"
28
#include "hard-reg-set.h"
29
#include "rtl.h"
30
#include "insn-config.h"
31
#include "recog.h"
32
#include "target.h"
33
#include "output.h"
34
#include "tm_p.h"
35
#include "flags.h"
36
#include "regs.h"
37
#include "function.h"
38
#include "df.h"
39
#include "tree.h"
40
#include "emit-rtl.h"  /* FIXME: Can go away once crtl is moved to rtl.h.  */
41
 
42
/* Forward declarations */
43
static void set_of_1 (rtx, const_rtx, void *);
44
static bool covers_regno_p (const_rtx, unsigned int);
45
static bool covers_regno_no_parallel_p (const_rtx, unsigned int);
46
static int rtx_referenced_p_1 (rtx *, void *);
47
static int computed_jump_p_1 (const_rtx);
48
static void parms_set (rtx, const_rtx, void *);
49
 
50
static unsigned HOST_WIDE_INT cached_nonzero_bits (const_rtx, enum machine_mode,
51
                                                   const_rtx, enum machine_mode,
52
                                                   unsigned HOST_WIDE_INT);
53
static unsigned HOST_WIDE_INT nonzero_bits1 (const_rtx, enum machine_mode,
54
                                             const_rtx, enum machine_mode,
55
                                             unsigned HOST_WIDE_INT);
56
static unsigned int cached_num_sign_bit_copies (const_rtx, enum machine_mode, const_rtx,
57
                                                enum machine_mode,
58
                                                unsigned int);
59
static unsigned int num_sign_bit_copies1 (const_rtx, enum machine_mode, const_rtx,
60
                                          enum machine_mode, unsigned int);
61
 
62
/* Offset of the first 'e', 'E' or 'V' operand for each rtx code, or
63
   -1 if a code has no such operand.  */
64
static int non_rtx_starting_operands[NUM_RTX_CODE];
65
 
66
/* Truncation narrows the mode from SOURCE mode to DESTINATION mode.
67
   If TARGET_MODE_REP_EXTENDED (DESTINATION, DESTINATION_REP) is
68
   SIGN_EXTEND then while narrowing we also have to enforce the
69
   representation and sign-extend the value to mode DESTINATION_REP.
70
 
71
   If the value is already sign-extended to DESTINATION_REP mode we
72
   can just switch to DESTINATION mode on it.  For each pair of
73
   integral modes SOURCE and DESTINATION, when truncating from SOURCE
74
   to DESTINATION, NUM_SIGN_BIT_COPIES_IN_REP[SOURCE][DESTINATION]
75
   contains the number of high-order bits in SOURCE that have to be
76
   copies of the sign-bit so that we can do this mode-switch to
77
   DESTINATION.  */
78
 
79
static unsigned int
80
num_sign_bit_copies_in_rep[MAX_MODE_INT + 1][MAX_MODE_INT + 1];
81
 
82
/* Return 1 if the value of X is unstable
83
   (would be different at a different point in the program).
84
   The frame pointer, arg pointer, etc. are considered stable
85
   (within one function) and so is anything marked `unchanging'.  */
86
 
87
int
88
rtx_unstable_p (const_rtx x)
89
{
90
  const RTX_CODE code = GET_CODE (x);
91
  int i;
92
  const char *fmt;
93
 
94
  switch (code)
95
    {
96
    case MEM:
97
      return !MEM_READONLY_P (x) || rtx_unstable_p (XEXP (x, 0));
98
 
99
    case CONST:
100
    case CONST_INT:
101
    case CONST_DOUBLE:
102
    case CONST_FIXED:
103
    case CONST_VECTOR:
104
    case SYMBOL_REF:
105
    case LABEL_REF:
106
      return 0;
107
 
108
    case REG:
109
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
110
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
111
          /* The arg pointer varies if it is not a fixed register.  */
112
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
113
        return 0;
114
      /* ??? When call-clobbered, the value is stable modulo the restore
115
         that must happen after a call.  This currently screws up local-alloc
116
         into believing that the restore is not needed.  */
117
      if (!PIC_OFFSET_TABLE_REG_CALL_CLOBBERED && x == pic_offset_table_rtx)
118
        return 0;
119
      return 1;
120
 
121
    case ASM_OPERANDS:
122
      if (MEM_VOLATILE_P (x))
123
        return 1;
124
 
125
      /* Fall through.  */
126
 
127
    default:
128
      break;
129
    }
130
 
131
  fmt = GET_RTX_FORMAT (code);
132
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
133
    if (fmt[i] == 'e')
134
      {
135
        if (rtx_unstable_p (XEXP (x, i)))
136
          return 1;
137
      }
138
    else if (fmt[i] == 'E')
139
      {
140
        int j;
141
        for (j = 0; j < XVECLEN (x, i); j++)
142
          if (rtx_unstable_p (XVECEXP (x, i, j)))
143
            return 1;
144
      }
145
 
146
  return 0;
147
}
148
 
149
/* Return 1 if X has a value that can vary even between two
150
   executions of the program.  0 means X can be compared reliably
151
   against certain constants or near-constants.
152
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
153
   zero, we are slightly more conservative.
154
   The frame pointer and the arg pointer are considered constant.  */
155
 
156
bool
157
rtx_varies_p (const_rtx x, bool for_alias)
158
{
159
  RTX_CODE code;
160
  int i;
161
  const char *fmt;
162
 
163
  if (!x)
164
    return 0;
165
 
166
  code = GET_CODE (x);
167
  switch (code)
168
    {
169
    case MEM:
170
      return !MEM_READONLY_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);
171
 
172
    case CONST:
173
    case CONST_INT:
174
    case CONST_DOUBLE:
175
    case CONST_FIXED:
176
    case CONST_VECTOR:
177
    case SYMBOL_REF:
178
    case LABEL_REF:
179
      return 0;
180
 
181
    case REG:
182
      /* Note that we have to test for the actual rtx used for the frame
183
         and arg pointers and not just the register number in case we have
184
         eliminated the frame and/or arg pointer and are using it
185
         for pseudos.  */
186
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
187
          /* The arg pointer varies if it is not a fixed register.  */
188
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
189
        return 0;
190
      if (x == pic_offset_table_rtx
191
          /* ??? When call-clobbered, the value is stable modulo the restore
192
             that must happen after a call.  This currently screws up
193
             local-alloc into believing that the restore is not needed, so we
194
             must return 0 only if we are called from alias analysis.  */
195
          && (!PIC_OFFSET_TABLE_REG_CALL_CLOBBERED || for_alias))
196
        return 0;
197
      return 1;
198
 
199
    case LO_SUM:
200
      /* The operand 0 of a LO_SUM is considered constant
201
         (in fact it is related specifically to operand 1)
202
         during alias analysis.  */
203
      return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
204
             || rtx_varies_p (XEXP (x, 1), for_alias);
205
 
206
    case ASM_OPERANDS:
207
      if (MEM_VOLATILE_P (x))
208
        return 1;
209
 
210
      /* Fall through.  */
211
 
212
    default:
213
      break;
214
    }
215
 
216
  fmt = GET_RTX_FORMAT (code);
217
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
218
    if (fmt[i] == 'e')
219
      {
220
        if (rtx_varies_p (XEXP (x, i), for_alias))
221
          return 1;
222
      }
223
    else if (fmt[i] == 'E')
224
      {
225
        int j;
226
        for (j = 0; j < XVECLEN (x, i); j++)
227
          if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
228
            return 1;
229
      }
230
 
231
  return 0;
232
}
233
 
234
/* Return nonzero if the use of X as an address in a MEM can cause a trap.
235
   MODE is the mode of the MEM (not that of X) and UNALIGNED_MEMS controls
236
   whether nonzero is returned for unaligned memory accesses on strict
237
   alignment machines.  */
238
 
239
static int
240
rtx_addr_can_trap_p_1 (const_rtx x, HOST_WIDE_INT offset, HOST_WIDE_INT size,
241
                       enum machine_mode mode, bool unaligned_mems)
242
{
243
  enum rtx_code code = GET_CODE (x);
244
 
245
  if (STRICT_ALIGNMENT
246
      && unaligned_mems
247
      && GET_MODE_SIZE (mode) != 0)
248
    {
249
      HOST_WIDE_INT actual_offset = offset;
250
#ifdef SPARC_STACK_BOUNDARY_HACK
251
      /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
252
             the real alignment of %sp.  However, when it does this, the
253
             alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY.  */
254
      if (SPARC_STACK_BOUNDARY_HACK
255
          && (x == stack_pointer_rtx || x == hard_frame_pointer_rtx))
256
        actual_offset -= STACK_POINTER_OFFSET;
257
#endif
258
 
259
      if (actual_offset % GET_MODE_SIZE (mode) != 0)
260
        return 1;
261
    }
262
 
263
  switch (code)
264
    {
265
    case SYMBOL_REF:
266
      if (SYMBOL_REF_WEAK (x))
267
        return 1;
268
      if (!CONSTANT_POOL_ADDRESS_P (x))
269
        {
270
          tree decl;
271
          HOST_WIDE_INT decl_size;
272
 
273
          if (offset < 0)
274
            return 1;
275
          if (size == 0)
276
            size = GET_MODE_SIZE (mode);
277
          if (size == 0)
278
            return offset != 0;
279
 
280
          /* If the size of the access or of the symbol is unknown,
281
             assume the worst.  */
282
          decl = SYMBOL_REF_DECL (x);
283
 
284
          /* Else check that the access is in bounds.  TODO: restructure
285
             expr_size/tree_expr_size/int_expr_size and just use the latter.  */
286
          if (!decl)
287
            decl_size = -1;
288
          else if (DECL_P (decl) && DECL_SIZE_UNIT (decl))
289
            decl_size = (host_integerp (DECL_SIZE_UNIT (decl), 0)
290
                         ? tree_low_cst (DECL_SIZE_UNIT (decl), 0)
291
                         : -1);
292
          else if (TREE_CODE (decl) == STRING_CST)
293
            decl_size = TREE_STRING_LENGTH (decl);
294
          else if (TYPE_SIZE_UNIT (TREE_TYPE (decl)))
295
            decl_size = int_size_in_bytes (TREE_TYPE (decl));
296
          else
297
            decl_size = -1;
298
 
299
          return (decl_size <= 0 ? offset != 0 : offset + size > decl_size);
300
        }
301
 
302
      return 0;
303
 
304
    case LABEL_REF:
305
      return 0;
306
 
307
    case REG:
308
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
309
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
310
          || x == stack_pointer_rtx
311
          /* The arg pointer varies if it is not a fixed register.  */
312
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
313
        return 0;
314
      /* All of the virtual frame registers are stack references.  */
315
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
316
          && REGNO (x) <= LAST_VIRTUAL_REGISTER)
317
        return 0;
318
      return 1;
319
 
320
    case CONST:
321
      return rtx_addr_can_trap_p_1 (XEXP (x, 0), offset, size,
322
                                    mode, unaligned_mems);
323
 
324
    case PLUS:
325
      /* An address is assumed not to trap if:
326
         - it is the pic register plus a constant.  */
327
      if (XEXP (x, 0) == pic_offset_table_rtx && CONSTANT_P (XEXP (x, 1)))
328
        return 0;
329
 
330
      /* - or it is an address that can't trap plus a constant integer,
331
           with the proper remainder modulo the mode size if we are
332
           considering unaligned memory references.  */
333
      if (CONST_INT_P (XEXP (x, 1))
334
          && !rtx_addr_can_trap_p_1 (XEXP (x, 0), offset + INTVAL (XEXP (x, 1)),
335
                                     size, mode, unaligned_mems))
336
        return 0;
337
 
338
      return 1;
339
 
340
    case LO_SUM:
341
    case PRE_MODIFY:
342
      return rtx_addr_can_trap_p_1 (XEXP (x, 1), offset, size,
343
                                    mode, unaligned_mems);
344
 
345
    case PRE_DEC:
346
    case PRE_INC:
347
    case POST_DEC:
348
    case POST_INC:
349
    case POST_MODIFY:
350
      return rtx_addr_can_trap_p_1 (XEXP (x, 0), offset, size,
351
                                    mode, unaligned_mems);
352
 
353
    default:
354
      break;
355
    }
356
 
357
  /* If it isn't one of the case above, it can cause a trap.  */
358
  return 1;
359
}
360
 
361
/* Return nonzero if the use of X as an address in a MEM can cause a trap.  */
362
 
363
int
364
rtx_addr_can_trap_p (const_rtx x)
365
{
366
  return rtx_addr_can_trap_p_1 (x, 0, 0, VOIDmode, false);
367
}
368
 
369
/* Return true if X is an address that is known to not be zero.  */
370
 
371
bool
372
nonzero_address_p (const_rtx x)
373
{
374
  const enum rtx_code code = GET_CODE (x);
375
 
376
  switch (code)
377
    {
378
    case SYMBOL_REF:
379
      return !SYMBOL_REF_WEAK (x);
380
 
381
    case LABEL_REF:
382
      return true;
383
 
384
    case REG:
385
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
386
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
387
          || x == stack_pointer_rtx
388
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
389
        return true;
390
      /* All of the virtual frame registers are stack references.  */
391
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
392
          && REGNO (x) <= LAST_VIRTUAL_REGISTER)
393
        return true;
394
      return false;
395
 
396
    case CONST:
397
      return nonzero_address_p (XEXP (x, 0));
398
 
399
    case PLUS:
400
      if (CONST_INT_P (XEXP (x, 1)))
401
        return nonzero_address_p (XEXP (x, 0));
402
      /* Handle PIC references.  */
403
      else if (XEXP (x, 0) == pic_offset_table_rtx
404
               && CONSTANT_P (XEXP (x, 1)))
405
        return true;
406
      return false;
407
 
408
    case PRE_MODIFY:
409
      /* Similar to the above; allow positive offsets.  Further, since
410
         auto-inc is only allowed in memories, the register must be a
411
         pointer.  */
412
      if (CONST_INT_P (XEXP (x, 1))
413
          && INTVAL (XEXP (x, 1)) > 0)
414
        return true;
415
      return nonzero_address_p (XEXP (x, 0));
416
 
417
    case PRE_INC:
418
      /* Similarly.  Further, the offset is always positive.  */
419
      return true;
420
 
421
    case PRE_DEC:
422
    case POST_DEC:
423
    case POST_INC:
424
    case POST_MODIFY:
425
      return nonzero_address_p (XEXP (x, 0));
426
 
427
    case LO_SUM:
428
      return nonzero_address_p (XEXP (x, 1));
429
 
430
    default:
431
      break;
432
    }
433
 
434
  /* If it isn't one of the case above, might be zero.  */
435
  return false;
436
}
437
 
438
/* Return 1 if X refers to a memory location whose address
439
   cannot be compared reliably with constant addresses,
440
   or if X refers to a BLKmode memory object.
441
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
442
   zero, we are slightly more conservative.  */
443
 
444
bool
445
rtx_addr_varies_p (const_rtx x, bool for_alias)
446
{
447
  enum rtx_code code;
448
  int i;
449
  const char *fmt;
450
 
451
  if (x == 0)
452
    return 0;
453
 
454
  code = GET_CODE (x);
455
  if (code == MEM)
456
    return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);
457
 
458
  fmt = GET_RTX_FORMAT (code);
459
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
460
    if (fmt[i] == 'e')
461
      {
462
        if (rtx_addr_varies_p (XEXP (x, i), for_alias))
463
          return 1;
464
      }
465
    else if (fmt[i] == 'E')
466
      {
467
        int j;
468
        for (j = 0; j < XVECLEN (x, i); j++)
469
          if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
470
            return 1;
471
      }
472
  return 0;
473
}
474
 
475
/* Return the value of the integer term in X, if one is apparent;
476
   otherwise return 0.
477
   Only obvious integer terms are detected.
478
   This is used in cse.c with the `related_value' field.  */
479
 
480
HOST_WIDE_INT
481
get_integer_term (const_rtx x)
482
{
483
  if (GET_CODE (x) == CONST)
484
    x = XEXP (x, 0);
485
 
486
  if (GET_CODE (x) == MINUS
487
      && CONST_INT_P (XEXP (x, 1)))
488
    return - INTVAL (XEXP (x, 1));
489
  if (GET_CODE (x) == PLUS
490
      && CONST_INT_P (XEXP (x, 1)))
491
    return INTVAL (XEXP (x, 1));
492
  return 0;
493
}
494
 
495
/* If X is a constant, return the value sans apparent integer term;
496
   otherwise return 0.
497
   Only obvious integer terms are detected.  */
498
 
499
rtx
500
get_related_value (const_rtx x)
501
{
502
  if (GET_CODE (x) != CONST)
503
    return 0;
504
  x = XEXP (x, 0);
505
  if (GET_CODE (x) == PLUS
506
      && CONST_INT_P (XEXP (x, 1)))
507
    return XEXP (x, 0);
508
  else if (GET_CODE (x) == MINUS
509
           && CONST_INT_P (XEXP (x, 1)))
510
    return XEXP (x, 0);
511
  return 0;
512
}
513
 
514
/* Return true if SYMBOL is a SYMBOL_REF and OFFSET + SYMBOL points
515
   to somewhere in the same object or object_block as SYMBOL.  */
516
 
517
bool
518
offset_within_block_p (const_rtx symbol, HOST_WIDE_INT offset)
519
{
520
  tree decl;
521
 
522
  if (GET_CODE (symbol) != SYMBOL_REF)
523
    return false;
524
 
525
  if (offset == 0)
526
    return true;
527
 
528
  if (offset > 0)
529
    {
530
      if (CONSTANT_POOL_ADDRESS_P (symbol)
531
          && offset < (int) GET_MODE_SIZE (get_pool_mode (symbol)))
532
        return true;
533
 
534
      decl = SYMBOL_REF_DECL (symbol);
535
      if (decl && offset < int_size_in_bytes (TREE_TYPE (decl)))
536
        return true;
537
    }
538
 
539
  if (SYMBOL_REF_HAS_BLOCK_INFO_P (symbol)
540
      && SYMBOL_REF_BLOCK (symbol)
541
      && SYMBOL_REF_BLOCK_OFFSET (symbol) >= 0
542
      && ((unsigned HOST_WIDE_INT) offset + SYMBOL_REF_BLOCK_OFFSET (symbol)
543
          < (unsigned HOST_WIDE_INT) SYMBOL_REF_BLOCK (symbol)->size))
544
    return true;
545
 
546
  return false;
547
}
548
 
549
/* Split X into a base and a constant offset, storing them in *BASE_OUT
550
   and *OFFSET_OUT respectively.  */
551
 
552
void
553
split_const (rtx x, rtx *base_out, rtx *offset_out)
554
{
555
  if (GET_CODE (x) == CONST)
556
    {
557
      x = XEXP (x, 0);
558
      if (GET_CODE (x) == PLUS && CONST_INT_P (XEXP (x, 1)))
559
        {
560
          *base_out = XEXP (x, 0);
561
          *offset_out = XEXP (x, 1);
562
          return;
563
        }
564
    }
565
  *base_out = x;
566
  *offset_out = const0_rtx;
567
}
568
 
569
/* Return the number of places FIND appears within X.  If COUNT_DEST is
570
   zero, we do not count occurrences inside the destination of a SET.  */
571
 
572
int
573
count_occurrences (const_rtx x, const_rtx find, int count_dest)
574
{
575
  int i, j;
576
  enum rtx_code code;
577
  const char *format_ptr;
578
  int count;
579
 
580
  if (x == find)
581
    return 1;
582
 
583
  code = GET_CODE (x);
584
 
585
  switch (code)
586
    {
587
    case REG:
588
    case CONST_INT:
589
    case CONST_DOUBLE:
590
    case CONST_FIXED:
591
    case CONST_VECTOR:
592
    case SYMBOL_REF:
593
    case CODE_LABEL:
594
    case PC:
595
    case CC0:
596
      return 0;
597
 
598
    case EXPR_LIST:
599
      count = count_occurrences (XEXP (x, 0), find, count_dest);
600
      if (XEXP (x, 1))
601
        count += count_occurrences (XEXP (x, 1), find, count_dest);
602
      return count;
603
 
604
    case MEM:
605
      if (MEM_P (find) && rtx_equal_p (x, find))
606
        return 1;
607
      break;
608
 
609
    case SET:
610
      if (SET_DEST (x) == find && ! count_dest)
611
        return count_occurrences (SET_SRC (x), find, count_dest);
612
      break;
613
 
614
    default:
615
      break;
616
    }
617
 
618
  format_ptr = GET_RTX_FORMAT (code);
619
  count = 0;
620
 
621
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
622
    {
623
      switch (*format_ptr++)
624
        {
625
        case 'e':
626
          count += count_occurrences (XEXP (x, i), find, count_dest);
627
          break;
628
 
629
        case 'E':
630
          for (j = 0; j < XVECLEN (x, i); j++)
631
            count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
632
          break;
633
        }
634
    }
635
  return count;
636
}
637
 
638
 
639
/* Nonzero if register REG appears somewhere within IN.
640
   Also works if REG is not a register; in this case it checks
641
   for a subexpression of IN that is Lisp "equal" to REG.  */
642
 
643
int
644
reg_mentioned_p (const_rtx reg, const_rtx in)
645
{
646
  const char *fmt;
647
  int i;
648
  enum rtx_code code;
649
 
650
  if (in == 0)
651
    return 0;
652
 
653
  if (reg == in)
654
    return 1;
655
 
656
  if (GET_CODE (in) == LABEL_REF)
657
    return reg == XEXP (in, 0);
658
 
659
  code = GET_CODE (in);
660
 
661
  switch (code)
662
    {
663
      /* Compare registers by number.  */
664
    case REG:
665
      return REG_P (reg) && REGNO (in) == REGNO (reg);
666
 
667
      /* These codes have no constituent expressions
668
         and are unique.  */
669
    case SCRATCH:
670
    case CC0:
671
    case PC:
672
      return 0;
673
 
674
    case CONST_INT:
675
    case CONST_VECTOR:
676
    case CONST_DOUBLE:
677
    case CONST_FIXED:
678
      /* These are kept unique for a given value.  */
679
      return 0;
680
 
681
    default:
682
      break;
683
    }
684
 
685
  if (GET_CODE (reg) == code && rtx_equal_p (reg, in))
686
    return 1;
687
 
688
  fmt = GET_RTX_FORMAT (code);
689
 
690
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
691
    {
692
      if (fmt[i] == 'E')
693
        {
694
          int j;
695
          for (j = XVECLEN (in, i) - 1; j >= 0; j--)
696
            if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
697
              return 1;
698
        }
699
      else if (fmt[i] == 'e'
700
               && reg_mentioned_p (reg, XEXP (in, i)))
701
        return 1;
702
    }
703
  return 0;
704
}
705
 
706
/* Return 1 if in between BEG and END, exclusive of BEG and END, there is
707
   no CODE_LABEL insn.  */
708
 
709
int
710
no_labels_between_p (const_rtx beg, const_rtx end)
711
{
712
  rtx p;
713
  if (beg == end)
714
    return 0;
715
  for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
716
    if (LABEL_P (p))
717
      return 0;
718
  return 1;
719
}
720
 
721
/* Nonzero if register REG is used in an insn between
722
   FROM_INSN and TO_INSN (exclusive of those two).  */
723
 
724
int
725
reg_used_between_p (const_rtx reg, const_rtx from_insn, const_rtx to_insn)
726
{
727
  rtx insn;
728
 
729
  if (from_insn == to_insn)
730
    return 0;
731
 
732
  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
733
    if (NONDEBUG_INSN_P (insn)
734
        && (reg_overlap_mentioned_p (reg, PATTERN (insn))
735
           || (CALL_P (insn) && find_reg_fusage (insn, USE, reg))))
736
      return 1;
737
  return 0;
738
}
739
 
740
/* Nonzero if the old value of X, a register, is referenced in BODY.  If X
741
   is entirely replaced by a new value and the only use is as a SET_DEST,
742
   we do not consider it a reference.  */
743
 
744
int
745
reg_referenced_p (const_rtx x, const_rtx body)
746
{
747
  int i;
748
 
749
  switch (GET_CODE (body))
750
    {
751
    case SET:
752
      if (reg_overlap_mentioned_p (x, SET_SRC (body)))
753
        return 1;
754
 
755
      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
756
         of a REG that occupies all of the REG, the insn references X if
757
         it is mentioned in the destination.  */
758
      if (GET_CODE (SET_DEST (body)) != CC0
759
          && GET_CODE (SET_DEST (body)) != PC
760
          && !REG_P (SET_DEST (body))
761
          && ! (GET_CODE (SET_DEST (body)) == SUBREG
762
                && REG_P (SUBREG_REG (SET_DEST (body)))
763
                && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
764
                      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
765
                    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
766
                         + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
767
          && reg_overlap_mentioned_p (x, SET_DEST (body)))
768
        return 1;
769
      return 0;
770
 
771
    case ASM_OPERANDS:
772
      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
773
        if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))
774
          return 1;
775
      return 0;
776
 
777
    case CALL:
778
    case USE:
779
    case IF_THEN_ELSE:
780
      return reg_overlap_mentioned_p (x, body);
781
 
782
    case TRAP_IF:
783
      return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));
784
 
785
    case PREFETCH:
786
      return reg_overlap_mentioned_p (x, XEXP (body, 0));
787
 
788
    case UNSPEC:
789
    case UNSPEC_VOLATILE:
790
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
791
        if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))
792
          return 1;
793
      return 0;
794
 
795
    case PARALLEL:
796
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
797
        if (reg_referenced_p (x, XVECEXP (body, 0, i)))
798
          return 1;
799
      return 0;
800
 
801
    case CLOBBER:
802
      if (MEM_P (XEXP (body, 0)))
803
        if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
804
          return 1;
805
      return 0;
806
 
807
    case COND_EXEC:
808
      if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
809
        return 1;
810
      return reg_referenced_p (x, COND_EXEC_CODE (body));
811
 
812
    default:
813
      return 0;
814
    }
815
}
816
 
817
/* Nonzero if register REG is set or clobbered in an insn between
818
   FROM_INSN and TO_INSN (exclusive of those two).  */
819
 
820
int
821
reg_set_between_p (const_rtx reg, const_rtx from_insn, const_rtx to_insn)
822
{
823
  const_rtx insn;
824
 
825
  if (from_insn == to_insn)
826
    return 0;
827
 
828
  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
829
    if (INSN_P (insn) && reg_set_p (reg, insn))
830
      return 1;
831
  return 0;
832
}
833
 
834
/* Internals of reg_set_between_p.  */
835
int
836
reg_set_p (const_rtx reg, const_rtx insn)
837
{
838
  /* We can be passed an insn or part of one.  If we are passed an insn,
839
     check if a side-effect of the insn clobbers REG.  */
840
  if (INSN_P (insn)
841
      && (FIND_REG_INC_NOTE (insn, reg)
842
          || (CALL_P (insn)
843
              && ((REG_P (reg)
844
                   && REGNO (reg) < FIRST_PSEUDO_REGISTER
845
                   && overlaps_hard_reg_set_p (regs_invalidated_by_call,
846
                                               GET_MODE (reg), REGNO (reg)))
847
                  || MEM_P (reg)
848
                  || find_reg_fusage (insn, CLOBBER, reg)))))
849
    return 1;
850
 
851
  return set_of (reg, insn) != NULL_RTX;
852
}
853
 
854
/* Similar to reg_set_between_p, but check all registers in X.  Return 0
855
   only if none of them are modified between START and END.  Return 1 if
856
   X contains a MEM; this routine does use memory aliasing.  */
857
 
858
int
859
modified_between_p (const_rtx x, const_rtx start, const_rtx end)
860
{
861
  const enum rtx_code code = GET_CODE (x);
862
  const char *fmt;
863
  int i, j;
864
  rtx insn;
865
 
866
  if (start == end)
867
    return 0;
868
 
869
  switch (code)
870
    {
871
    case CONST_INT:
872
    case CONST_DOUBLE:
873
    case CONST_FIXED:
874
    case CONST_VECTOR:
875
    case CONST:
876
    case SYMBOL_REF:
877
    case LABEL_REF:
878
      return 0;
879
 
880
    case PC:
881
    case CC0:
882
      return 1;
883
 
884
    case MEM:
885
      if (modified_between_p (XEXP (x, 0), start, end))
886
        return 1;
887
      if (MEM_READONLY_P (x))
888
        return 0;
889
      for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
890
        if (memory_modified_in_insn_p (x, insn))
891
          return 1;
892
      return 0;
893
      break;
894
 
895
    case REG:
896
      return reg_set_between_p (x, start, end);
897
 
898
    default:
899
      break;
900
    }
901
 
902
  fmt = GET_RTX_FORMAT (code);
903
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
904
    {
905
      if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
906
        return 1;
907
 
908
      else if (fmt[i] == 'E')
909
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
910
          if (modified_between_p (XVECEXP (x, i, j), start, end))
911
            return 1;
912
    }
913
 
914
  return 0;
915
}
916
 
917
/* Similar to reg_set_p, but check all registers in X.  Return 0 only if none
918
   of them are modified in INSN.  Return 1 if X contains a MEM; this routine
919
   does use memory aliasing.  */
920
 
921
int
922
modified_in_p (const_rtx x, const_rtx insn)
923
{
924
  const enum rtx_code code = GET_CODE (x);
925
  const char *fmt;
926
  int i, j;
927
 
928
  switch (code)
929
    {
930
    case CONST_INT:
931
    case CONST_DOUBLE:
932
    case CONST_FIXED:
933
    case CONST_VECTOR:
934
    case CONST:
935
    case SYMBOL_REF:
936
    case LABEL_REF:
937
      return 0;
938
 
939
    case PC:
940
    case CC0:
941
      return 1;
942
 
943
    case MEM:
944
      if (modified_in_p (XEXP (x, 0), insn))
945
        return 1;
946
      if (MEM_READONLY_P (x))
947
        return 0;
948
      if (memory_modified_in_insn_p (x, insn))
949
        return 1;
950
      return 0;
951
      break;
952
 
953
    case REG:
954
      return reg_set_p (x, insn);
955
 
956
    default:
957
      break;
958
    }
959
 
960
  fmt = GET_RTX_FORMAT (code);
961
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
962
    {
963
      if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
964
        return 1;
965
 
966
      else if (fmt[i] == 'E')
967
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
968
          if (modified_in_p (XVECEXP (x, i, j), insn))
969
            return 1;
970
    }
971
 
972
  return 0;
973
}
974
 
975
/* Helper function for set_of.  */
976
struct set_of_data
977
  {
978
    const_rtx found;
979
    const_rtx pat;
980
  };
981
 
982
static void
983
set_of_1 (rtx x, const_rtx pat, void *data1)
984
{
985
  struct set_of_data *const data = (struct set_of_data *) (data1);
986
  if (rtx_equal_p (x, data->pat)
987
      || (!MEM_P (x) && reg_overlap_mentioned_p (data->pat, x)))
988
    data->found = pat;
989
}
990
 
991
/* Give an INSN, return a SET or CLOBBER expression that does modify PAT
992
   (either directly or via STRICT_LOW_PART and similar modifiers).  */
993
const_rtx
994
set_of (const_rtx pat, const_rtx insn)
995
{
996
  struct set_of_data data;
997
  data.found = NULL_RTX;
998
  data.pat = pat;
999
  note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
1000
  return data.found;
1001
}
1002
 
1003
/* This function, called through note_stores, collects sets and
1004
   clobbers of hard registers in a HARD_REG_SET, which is pointed to
1005
   by DATA.  */
1006
void
1007
record_hard_reg_sets (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
1008
{
1009
  HARD_REG_SET *pset = (HARD_REG_SET *)data;
1010
  if (REG_P (x) && HARD_REGISTER_P (x))
1011
    add_to_hard_reg_set (pset, GET_MODE (x), REGNO (x));
1012
}
1013
 
1014
/* Examine INSN, and compute the set of hard registers written by it.
1015
   Store it in *PSET.  Should only be called after reload.  */
1016
void
1017
find_all_hard_reg_sets (const_rtx insn, HARD_REG_SET *pset)
1018
{
1019
  rtx link;
1020
 
1021
  CLEAR_HARD_REG_SET (*pset);
1022
  note_stores (PATTERN (insn), record_hard_reg_sets, pset);
1023
  if (CALL_P (insn))
1024
    IOR_HARD_REG_SET (*pset, call_used_reg_set);
1025
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1026
    if (REG_NOTE_KIND (link) == REG_INC)
1027
      record_hard_reg_sets (XEXP (link, 0), NULL, pset);
1028
}
1029
 
1030
/* A for_each_rtx subroutine of record_hard_reg_uses.  */
1031
static int
1032
record_hard_reg_uses_1 (rtx *px, void *data)
1033
{
1034
  rtx x = *px;
1035
  HARD_REG_SET *pused = (HARD_REG_SET *)data;
1036
 
1037
  if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
1038
    {
1039
      int nregs = hard_regno_nregs[REGNO (x)][GET_MODE (x)];
1040
      while (nregs-- > 0)
1041
        SET_HARD_REG_BIT (*pused, REGNO (x) + nregs);
1042
    }
1043
  return 0;
1044
}
1045
 
1046
/* Like record_hard_reg_sets, but called through note_uses.  */
1047
void
1048
record_hard_reg_uses (rtx *px, void *data)
1049
{
1050
  for_each_rtx (px, record_hard_reg_uses_1, data);
1051
}
1052
 
1053
/* Given an INSN, return a SET expression if this insn has only a single SET.
1054
   It may also have CLOBBERs, USEs, or SET whose output
1055
   will not be used, which we ignore.  */
1056
 
1057
rtx
1058
single_set_2 (const_rtx insn, const_rtx pat)
1059
{
1060
  rtx set = NULL;
1061
  int set_verified = 1;
1062
  int i;
1063
 
1064
  if (GET_CODE (pat) == PARALLEL)
1065
    {
1066
      for (i = 0; i < XVECLEN (pat, 0); i++)
1067
        {
1068
          rtx sub = XVECEXP (pat, 0, i);
1069
          switch (GET_CODE (sub))
1070
            {
1071
            case USE:
1072
            case CLOBBER:
1073
              break;
1074
 
1075
            case SET:
1076
              /* We can consider insns having multiple sets, where all
1077
                 but one are dead as single set insns.  In common case
1078
                 only single set is present in the pattern so we want
1079
                 to avoid checking for REG_UNUSED notes unless necessary.
1080
 
1081
                 When we reach set first time, we just expect this is
1082
                 the single set we are looking for and only when more
1083
                 sets are found in the insn, we check them.  */
1084
              if (!set_verified)
1085
                {
1086
                  if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
1087
                      && !side_effects_p (set))
1088
                    set = NULL;
1089
                  else
1090
                    set_verified = 1;
1091
                }
1092
              if (!set)
1093
                set = sub, set_verified = 0;
1094
              else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
1095
                       || side_effects_p (sub))
1096
                return NULL_RTX;
1097
              break;
1098
 
1099
            default:
1100
              return NULL_RTX;
1101
            }
1102
        }
1103
    }
1104
  return set;
1105
}
1106
 
1107
/* Given an INSN, return nonzero if it has more than one SET, else return
1108
   zero.  */
1109
 
1110
int
1111
multiple_sets (const_rtx insn)
1112
{
1113
  int found;
1114
  int i;
1115
 
1116
  /* INSN must be an insn.  */
1117
  if (! INSN_P (insn))
1118
    return 0;
1119
 
1120
  /* Only a PARALLEL can have multiple SETs.  */
1121
  if (GET_CODE (PATTERN (insn)) == PARALLEL)
1122
    {
1123
      for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
1124
        if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
1125
          {
1126
            /* If we have already found a SET, then return now.  */
1127
            if (found)
1128
              return 1;
1129
            else
1130
              found = 1;
1131
          }
1132
    }
1133
 
1134
  /* Either zero or one SET.  */
1135
  return 0;
1136
}
1137
 
1138
/* Return nonzero if the destination of SET equals the source
1139
   and there are no side effects.  */
1140
 
1141
int
1142
set_noop_p (const_rtx set)
1143
{
1144
  rtx src = SET_SRC (set);
1145
  rtx dst = SET_DEST (set);
1146
 
1147
  if (dst == pc_rtx && src == pc_rtx)
1148
    return 1;
1149
 
1150
  if (MEM_P (dst) && MEM_P (src))
1151
    return rtx_equal_p (dst, src) && !side_effects_p (dst);
1152
 
1153
  if (GET_CODE (dst) == ZERO_EXTRACT)
1154
    return rtx_equal_p (XEXP (dst, 0), src)
1155
           && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx
1156
           && !side_effects_p (src);
1157
 
1158
  if (GET_CODE (dst) == STRICT_LOW_PART)
1159
    dst = XEXP (dst, 0);
1160
 
1161
  if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
1162
    {
1163
      if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))
1164
        return 0;
1165
      src = SUBREG_REG (src);
1166
      dst = SUBREG_REG (dst);
1167
    }
1168
 
1169
  return (REG_P (src) && REG_P (dst)
1170
          && REGNO (src) == REGNO (dst));
1171
}
1172
 
1173
/* Return nonzero if an insn consists only of SETs, each of which only sets a
1174
   value to itself.  */
1175
 
1176
int
1177
noop_move_p (const_rtx insn)
1178
{
1179
  rtx pat = PATTERN (insn);
1180
 
1181
  if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)
1182
    return 1;
1183
 
1184
  /* Insns carrying these notes are useful later on.  */
1185
  if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
1186
    return 0;
1187
 
1188
  if (GET_CODE (pat) == SET && set_noop_p (pat))
1189
    return 1;
1190
 
1191
  if (GET_CODE (pat) == PARALLEL)
1192
    {
1193
      int i;
1194
      /* If nothing but SETs of registers to themselves,
1195
         this insn can also be deleted.  */
1196
      for (i = 0; i < XVECLEN (pat, 0); i++)
1197
        {
1198
          rtx tem = XVECEXP (pat, 0, i);
1199
 
1200
          if (GET_CODE (tem) == USE
1201
              || GET_CODE (tem) == CLOBBER)
1202
            continue;
1203
 
1204
          if (GET_CODE (tem) != SET || ! set_noop_p (tem))
1205
            return 0;
1206
        }
1207
 
1208
      return 1;
1209
    }
1210
  return 0;
1211
}
1212
 
1213
 
1214
/* Return the last thing that X was assigned from before *PINSN.  If VALID_TO
1215
   is not NULL_RTX then verify that the object is not modified up to VALID_TO.
1216
   If the object was modified, if we hit a partial assignment to X, or hit a
1217
   CODE_LABEL first, return X.  If we found an assignment, update *PINSN to
1218
   point to it.  ALLOW_HWREG is set to 1 if hardware registers are allowed to
1219
   be the src.  */
1220
 
1221
rtx
1222
find_last_value (rtx x, rtx *pinsn, rtx valid_to, int allow_hwreg)
1223
{
1224
  rtx p;
1225
 
1226
  for (p = PREV_INSN (*pinsn); p && !LABEL_P (p);
1227
       p = PREV_INSN (p))
1228
    if (INSN_P (p))
1229
      {
1230
        rtx set = single_set (p);
1231
        rtx note = find_reg_note (p, REG_EQUAL, NULL_RTX);
1232
 
1233
        if (set && rtx_equal_p (x, SET_DEST (set)))
1234
          {
1235
            rtx src = SET_SRC (set);
1236
 
1237
            if (note && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
1238
              src = XEXP (note, 0);
1239
 
1240
            if ((valid_to == NULL_RTX
1241
                 || ! modified_between_p (src, PREV_INSN (p), valid_to))
1242
                /* Reject hard registers because we don't usually want
1243
                   to use them; we'd rather use a pseudo.  */
1244
                && (! (REG_P (src)
1245
                      && REGNO (src) < FIRST_PSEUDO_REGISTER) || allow_hwreg))
1246
              {
1247
                *pinsn = p;
1248
                return src;
1249
              }
1250
          }
1251
 
1252
        /* If set in non-simple way, we don't have a value.  */
1253
        if (reg_set_p (x, p))
1254
          break;
1255
      }
1256
 
1257
  return x;
1258
}
1259
 
1260
/* Return nonzero if register in range [REGNO, ENDREGNO)
1261
   appears either explicitly or implicitly in X
1262
   other than being stored into.
1263
 
1264
   References contained within the substructure at LOC do not count.
1265
   LOC may be zero, meaning don't ignore anything.  */
1266
 
1267
int
1268
refers_to_regno_p (unsigned int regno, unsigned int endregno, const_rtx x,
1269
                   rtx *loc)
1270
{
1271
  int i;
1272
  unsigned int x_regno;
1273
  RTX_CODE code;
1274
  const char *fmt;
1275
 
1276
 repeat:
1277
  /* The contents of a REG_NONNEG note is always zero, so we must come here
1278
     upon repeat in case the last REG_NOTE is a REG_NONNEG note.  */
1279
  if (x == 0)
1280
    return 0;
1281
 
1282
  code = GET_CODE (x);
1283
 
1284
  switch (code)
1285
    {
1286
    case REG:
1287
      x_regno = REGNO (x);
1288
 
1289
      /* If we modifying the stack, frame, or argument pointer, it will
1290
         clobber a virtual register.  In fact, we could be more precise,
1291
         but it isn't worth it.  */
1292
      if ((x_regno == STACK_POINTER_REGNUM
1293
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1294
           || x_regno == ARG_POINTER_REGNUM
1295
#endif
1296
           || x_regno == FRAME_POINTER_REGNUM)
1297
          && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)
1298
        return 1;
1299
 
1300
      return endregno > x_regno && regno < END_REGNO (x);
1301
 
1302
    case SUBREG:
1303
      /* If this is a SUBREG of a hard reg, we can see exactly which
1304
         registers are being modified.  Otherwise, handle normally.  */
1305
      if (REG_P (SUBREG_REG (x))
1306
          && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
1307
        {
1308
          unsigned int inner_regno = subreg_regno (x);
1309
          unsigned int inner_endregno
1310
            = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
1311
                             ? subreg_nregs (x) : 1);
1312
 
1313
          return endregno > inner_regno && regno < inner_endregno;
1314
        }
1315
      break;
1316
 
1317
    case CLOBBER:
1318
    case SET:
1319
      if (&SET_DEST (x) != loc
1320
          /* Note setting a SUBREG counts as referring to the REG it is in for
1321
             a pseudo but not for hard registers since we can
1322
             treat each word individually.  */
1323
          && ((GET_CODE (SET_DEST (x)) == SUBREG
1324
               && loc != &SUBREG_REG (SET_DEST (x))
1325
               && REG_P (SUBREG_REG (SET_DEST (x)))
1326
               && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
1327
               && refers_to_regno_p (regno, endregno,
1328
                                     SUBREG_REG (SET_DEST (x)), loc))
1329
              || (!REG_P (SET_DEST (x))
1330
                  && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))
1331
        return 1;
1332
 
1333
      if (code == CLOBBER || loc == &SET_SRC (x))
1334
        return 0;
1335
      x = SET_SRC (x);
1336
      goto repeat;
1337
 
1338
    default:
1339
      break;
1340
    }
1341
 
1342
  /* X does not match, so try its subexpressions.  */
1343
 
1344
  fmt = GET_RTX_FORMAT (code);
1345
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1346
    {
1347
      if (fmt[i] == 'e' && loc != &XEXP (x, i))
1348
        {
1349
          if (i == 0)
1350
            {
1351
              x = XEXP (x, 0);
1352
              goto repeat;
1353
            }
1354
          else
1355
            if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
1356
              return 1;
1357
        }
1358
      else if (fmt[i] == 'E')
1359
        {
1360
          int j;
1361
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1362
            if (loc != &XVECEXP (x, i, j)
1363
                && refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
1364
              return 1;
1365
        }
1366
    }
1367
  return 0;
1368
}
1369
 
1370
/* Nonzero if modifying X will affect IN.  If X is a register or a SUBREG,
1371
   we check if any register number in X conflicts with the relevant register
1372
   numbers.  If X is a constant, return 0.  If X is a MEM, return 1 iff IN
1373
   contains a MEM (we don't bother checking for memory addresses that can't
1374
   conflict because we expect this to be a rare case.  */
1375
 
1376
int
1377
reg_overlap_mentioned_p (const_rtx x, const_rtx in)
1378
{
1379
  unsigned int regno, endregno;
1380
 
1381
  /* If either argument is a constant, then modifying X can not
1382
     affect IN.  Here we look at IN, we can profitably combine
1383
     CONSTANT_P (x) with the switch statement below.  */
1384
  if (CONSTANT_P (in))
1385
    return 0;
1386
 
1387
 recurse:
1388
  switch (GET_CODE (x))
1389
    {
1390
    case STRICT_LOW_PART:
1391
    case ZERO_EXTRACT:
1392
    case SIGN_EXTRACT:
1393
      /* Overly conservative.  */
1394
      x = XEXP (x, 0);
1395
      goto recurse;
1396
 
1397
    case SUBREG:
1398
      regno = REGNO (SUBREG_REG (x));
1399
      if (regno < FIRST_PSEUDO_REGISTER)
1400
        regno = subreg_regno (x);
1401
      endregno = regno + (regno < FIRST_PSEUDO_REGISTER
1402
                          ? subreg_nregs (x) : 1);
1403
      goto do_reg;
1404
 
1405
    case REG:
1406
      regno = REGNO (x);
1407
      endregno = END_REGNO (x);
1408
    do_reg:
1409
      return refers_to_regno_p (regno, endregno, in, (rtx*) 0);
1410
 
1411
    case MEM:
1412
      {
1413
        const char *fmt;
1414
        int i;
1415
 
1416
        if (MEM_P (in))
1417
          return 1;
1418
 
1419
        fmt = GET_RTX_FORMAT (GET_CODE (in));
1420
        for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
1421
          if (fmt[i] == 'e')
1422
            {
1423
              if (reg_overlap_mentioned_p (x, XEXP (in, i)))
1424
                return 1;
1425
            }
1426
          else if (fmt[i] == 'E')
1427
            {
1428
              int j;
1429
              for (j = XVECLEN (in, i) - 1; j >= 0; --j)
1430
                if (reg_overlap_mentioned_p (x, XVECEXP (in, i, j)))
1431
                  return 1;
1432
            }
1433
 
1434
        return 0;
1435
      }
1436
 
1437
    case SCRATCH:
1438
    case PC:
1439
    case CC0:
1440
      return reg_mentioned_p (x, in);
1441
 
1442
    case PARALLEL:
1443
      {
1444
        int i;
1445
 
1446
        /* If any register in here refers to it we return true.  */
1447
        for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1448
          if (XEXP (XVECEXP (x, 0, i), 0) != 0
1449
              && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))
1450
            return 1;
1451
        return 0;
1452
      }
1453
 
1454
    default:
1455
      gcc_assert (CONSTANT_P (x));
1456
      return 0;
1457
    }
1458
}
1459
 
1460
/* Call FUN on each register or MEM that is stored into or clobbered by X.
1461
   (X would be the pattern of an insn).  DATA is an arbitrary pointer,
1462
   ignored by note_stores, but passed to FUN.
1463
 
1464
   FUN receives three arguments:
1465
   1. the REG, MEM, CC0 or PC being stored in or clobbered,
1466
   2. the SET or CLOBBER rtx that does the store,
1467
   3. the pointer DATA provided to note_stores.
1468
 
1469
  If the item being stored in or clobbered is a SUBREG of a hard register,
1470
  the SUBREG will be passed.  */
1471
 
1472
void
1473
note_stores (const_rtx x, void (*fun) (rtx, const_rtx, void *), void *data)
1474
{
1475
  int i;
1476
 
1477
  if (GET_CODE (x) == COND_EXEC)
1478
    x = COND_EXEC_CODE (x);
1479
 
1480
  if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
1481
    {
1482
      rtx dest = SET_DEST (x);
1483
 
1484
      while ((GET_CODE (dest) == SUBREG
1485
              && (!REG_P (SUBREG_REG (dest))
1486
                  || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
1487
             || GET_CODE (dest) == ZERO_EXTRACT
1488
             || GET_CODE (dest) == STRICT_LOW_PART)
1489
        dest = XEXP (dest, 0);
1490
 
1491
      /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
1492
         each of whose first operand is a register.  */
1493
      if (GET_CODE (dest) == PARALLEL)
1494
        {
1495
          for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
1496
            if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
1497
              (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data);
1498
        }
1499
      else
1500
        (*fun) (dest, x, data);
1501
    }
1502
 
1503
  else if (GET_CODE (x) == PARALLEL)
1504
    for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1505
      note_stores (XVECEXP (x, 0, i), fun, data);
1506
}
1507
 
1508
/* Like notes_stores, but call FUN for each expression that is being
1509
   referenced in PBODY, a pointer to the PATTERN of an insn.  We only call
1510
   FUN for each expression, not any interior subexpressions.  FUN receives a
1511
   pointer to the expression and the DATA passed to this function.
1512
 
1513
   Note that this is not quite the same test as that done in reg_referenced_p
1514
   since that considers something as being referenced if it is being
1515
   partially set, while we do not.  */
1516
 
1517
void
1518
note_uses (rtx *pbody, void (*fun) (rtx *, void *), void *data)
1519
{
1520
  rtx body = *pbody;
1521
  int i;
1522
 
1523
  switch (GET_CODE (body))
1524
    {
1525
    case COND_EXEC:
1526
      (*fun) (&COND_EXEC_TEST (body), data);
1527
      note_uses (&COND_EXEC_CODE (body), fun, data);
1528
      return;
1529
 
1530
    case PARALLEL:
1531
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1532
        note_uses (&XVECEXP (body, 0, i), fun, data);
1533
      return;
1534
 
1535
    case SEQUENCE:
1536
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1537
        note_uses (&PATTERN (XVECEXP (body, 0, i)), fun, data);
1538
      return;
1539
 
1540
    case USE:
1541
      (*fun) (&XEXP (body, 0), data);
1542
      return;
1543
 
1544
    case ASM_OPERANDS:
1545
      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
1546
        (*fun) (&ASM_OPERANDS_INPUT (body, i), data);
1547
      return;
1548
 
1549
    case TRAP_IF:
1550
      (*fun) (&TRAP_CONDITION (body), data);
1551
      return;
1552
 
1553
    case PREFETCH:
1554
      (*fun) (&XEXP (body, 0), data);
1555
      return;
1556
 
1557
    case UNSPEC:
1558
    case UNSPEC_VOLATILE:
1559
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1560
        (*fun) (&XVECEXP (body, 0, i), data);
1561
      return;
1562
 
1563
    case CLOBBER:
1564
      if (MEM_P (XEXP (body, 0)))
1565
        (*fun) (&XEXP (XEXP (body, 0), 0), data);
1566
      return;
1567
 
1568
    case SET:
1569
      {
1570
        rtx dest = SET_DEST (body);
1571
 
1572
        /* For sets we replace everything in source plus registers in memory
1573
           expression in store and operands of a ZERO_EXTRACT.  */
1574
        (*fun) (&SET_SRC (body), data);
1575
 
1576
        if (GET_CODE (dest) == ZERO_EXTRACT)
1577
          {
1578
            (*fun) (&XEXP (dest, 1), data);
1579
            (*fun) (&XEXP (dest, 2), data);
1580
          }
1581
 
1582
        while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
1583
          dest = XEXP (dest, 0);
1584
 
1585
        if (MEM_P (dest))
1586
          (*fun) (&XEXP (dest, 0), data);
1587
      }
1588
      return;
1589
 
1590
    default:
1591
      /* All the other possibilities never store.  */
1592
      (*fun) (pbody, data);
1593
      return;
1594
    }
1595
}
1596
 
1597
/* Return nonzero if X's old contents don't survive after INSN.
1598
   This will be true if X is (cc0) or if X is a register and
1599
   X dies in INSN or because INSN entirely sets X.
1600
 
1601
   "Entirely set" means set directly and not through a SUBREG, or
1602
   ZERO_EXTRACT, so no trace of the old contents remains.
1603
   Likewise, REG_INC does not count.
1604
 
1605
   REG may be a hard or pseudo reg.  Renumbering is not taken into account,
1606
   but for this use that makes no difference, since regs don't overlap
1607
   during their lifetimes.  Therefore, this function may be used
1608
   at any time after deaths have been computed.
1609
 
1610
   If REG is a hard reg that occupies multiple machine registers, this
1611
   function will only return 1 if each of those registers will be replaced
1612
   by INSN.  */
1613
 
1614
int
1615
dead_or_set_p (const_rtx insn, const_rtx x)
1616
{
1617
  unsigned int regno, end_regno;
1618
  unsigned int i;
1619
 
1620
  /* Can't use cc0_rtx below since this file is used by genattrtab.c.  */
1621
  if (GET_CODE (x) == CC0)
1622
    return 1;
1623
 
1624
  gcc_assert (REG_P (x));
1625
 
1626
  regno = REGNO (x);
1627
  end_regno = END_REGNO (x);
1628
  for (i = regno; i < end_regno; i++)
1629
    if (! dead_or_set_regno_p (insn, i))
1630
      return 0;
1631
 
1632
  return 1;
1633
}
1634
 
1635
/* Return TRUE iff DEST is a register or subreg of a register and
1636
   doesn't change the number of words of the inner register, and any
1637
   part of the register is TEST_REGNO.  */
1638
 
1639
static bool
1640
covers_regno_no_parallel_p (const_rtx dest, unsigned int test_regno)
1641
{
1642
  unsigned int regno, endregno;
1643
 
1644
  if (GET_CODE (dest) == SUBREG
1645
      && (((GET_MODE_SIZE (GET_MODE (dest))
1646
            + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1647
          == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
1648
               + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
1649
    dest = SUBREG_REG (dest);
1650
 
1651
  if (!REG_P (dest))
1652
    return false;
1653
 
1654
  regno = REGNO (dest);
1655
  endregno = END_REGNO (dest);
1656
  return (test_regno >= regno && test_regno < endregno);
1657
}
1658
 
1659
/* Like covers_regno_no_parallel_p, but also handles PARALLELs where
1660
   any member matches the covers_regno_no_parallel_p criteria.  */
1661
 
1662
static bool
1663
covers_regno_p (const_rtx dest, unsigned int test_regno)
1664
{
1665
  if (GET_CODE (dest) == PARALLEL)
1666
    {
1667
      /* Some targets place small structures in registers for return
1668
         values of functions, and those registers are wrapped in
1669
         PARALLELs that we may see as the destination of a SET.  */
1670
      int i;
1671
 
1672
      for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
1673
        {
1674
          rtx inner = XEXP (XVECEXP (dest, 0, i), 0);
1675
          if (inner != NULL_RTX
1676
              && covers_regno_no_parallel_p (inner, test_regno))
1677
            return true;
1678
        }
1679
 
1680
      return false;
1681
    }
1682
  else
1683
    return covers_regno_no_parallel_p (dest, test_regno);
1684
}
1685
 
1686
/* Utility function for dead_or_set_p to check an individual register. */
1687
 
1688
int
1689
dead_or_set_regno_p (const_rtx insn, unsigned int test_regno)
1690
{
1691
  const_rtx pattern;
1692
 
1693
  /* See if there is a death note for something that includes TEST_REGNO.  */
1694
  if (find_regno_note (insn, REG_DEAD, test_regno))
1695
    return 1;
1696
 
1697
  if (CALL_P (insn)
1698
      && find_regno_fusage (insn, CLOBBER, test_regno))
1699
    return 1;
1700
 
1701
  pattern = PATTERN (insn);
1702
 
1703
  if (GET_CODE (pattern) == COND_EXEC)
1704
    pattern = COND_EXEC_CODE (pattern);
1705
 
1706
  if (GET_CODE (pattern) == SET)
1707
    return covers_regno_p (SET_DEST (pattern), test_regno);
1708
  else if (GET_CODE (pattern) == PARALLEL)
1709
    {
1710
      int i;
1711
 
1712
      for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
1713
        {
1714
          rtx body = XVECEXP (pattern, 0, i);
1715
 
1716
          if (GET_CODE (body) == COND_EXEC)
1717
            body = COND_EXEC_CODE (body);
1718
 
1719
          if ((GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
1720
              && covers_regno_p (SET_DEST (body), test_regno))
1721
            return 1;
1722
        }
1723
    }
1724
 
1725
  return 0;
1726
}
1727
 
1728
/* Return the reg-note of kind KIND in insn INSN, if there is one.
1729
   If DATUM is nonzero, look for one whose datum is DATUM.  */
1730
 
1731
rtx
1732
find_reg_note (const_rtx insn, enum reg_note kind, const_rtx datum)
1733
{
1734
  rtx link;
1735
 
1736
  gcc_checking_assert (insn);
1737
 
1738
  /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
1739
  if (! INSN_P (insn))
1740
    return 0;
1741
  if (datum == 0)
1742
    {
1743
      for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1744
        if (REG_NOTE_KIND (link) == kind)
1745
          return link;
1746
      return 0;
1747
    }
1748
 
1749
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1750
    if (REG_NOTE_KIND (link) == kind && datum == XEXP (link, 0))
1751
      return link;
1752
  return 0;
1753
}
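
/* Illustrative sketch (editor's addition, not part of rtlanal.c): the
   typical caller asks for one kind of note and then inspects its datum,
   for instance to learn the value an insn is known to compute:

       rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
       if (note != NULL_RTX && CONSTANT_P (XEXP (note, 0)))
         ... the destination of INSN is known to equal XEXP (note, 0) ...

   Passing a nonzero DATUM instead restricts the search to a note whose
   datum is pointer-equal to DATUM.  */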
1754
 
1755
/* Return the reg-note of kind KIND in insn INSN which applies to register
1756
   number REGNO, if any.  Return 0 if there is no such reg-note.  Note that
1757
   the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
1758
   it might be the case that the note overlaps REGNO.  */
1759
 
1760
rtx
1761
find_regno_note (const_rtx insn, enum reg_note kind, unsigned int regno)
1762
{
1763
  rtx link;
1764
 
1765
  /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
1766
  if (! INSN_P (insn))
1767
    return 0;
1768
 
1769
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1770
    if (REG_NOTE_KIND (link) == kind
1771
        /* Verify that it is a register, so that scratch and MEM won't cause a
1772
           problem here.  */
1773
        && REG_P (XEXP (link, 0))
1774
        && REGNO (XEXP (link, 0)) <= regno
1775
        && END_REGNO (XEXP (link, 0)) > regno)
1776
      return link;
1777
  return 0;
1778
}
1779
 
1780
/* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
1781
   has such a note.  */
1782
 
1783
rtx
1784
find_reg_equal_equiv_note (const_rtx insn)
1785
{
1786
  rtx link;
1787
 
1788
  if (!INSN_P (insn))
1789
    return 0;
1790
 
1791
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1792
    if (REG_NOTE_KIND (link) == REG_EQUAL
1793
        || REG_NOTE_KIND (link) == REG_EQUIV)
1794
      {
1795
        /* FIXME: We should never have REG_EQUAL/REG_EQUIV notes on
1796
           insns that have multiple sets.  Checking single_set to
1797
           make sure of this is not the proper check, as explained
1798
           in the comment in set_unique_reg_note.
1799
 
1800
           This should be changed into an assert.  */
1801
        if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
1802
          return 0;
1803
        return link;
1804
      }
1805
  return NULL;
1806
}
1807
 
1808
/* Check whether INSN is a single_set whose source is known to be
1809
   equivalent to a constant.  Return that constant if so, otherwise
1810
   return null.  */
1811
 
1812
rtx
1813
find_constant_src (const_rtx insn)
1814
{
1815
  rtx note, set, x;
1816
 
1817
  set = single_set (insn);
1818
  if (set)
1819
    {
1820
      x = avoid_constant_pool_reference (SET_SRC (set));
1821
      if (CONSTANT_P (x))
1822
        return x;
1823
    }
1824
 
1825
  note = find_reg_equal_equiv_note (insn);
1826
  if (note && CONSTANT_P (XEXP (note, 0)))
1827
    return XEXP (note, 0);
1828
 
1829
  return NULL_RTX;
1830
}
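
/* Illustrative sketch (editor's addition, not part of rtlanal.c):
   find_constant_src combines the two lookups above, so a pass that
   propagates constants can simply write:

       rtx cst = find_constant_src (insn);
       if (cst != NULL_RTX && CONST_INT_P (cst))
         ... INSN loads the compile-time constant INTVAL (cst) ...

   where INSN is assumed to be a candidate single_set from the
   surrounding pass.  */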
1831
 
1832
/* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
1833
   in the CALL_INSN_FUNCTION_USAGE information of INSN.  */
1834
 
1835
int
1836
find_reg_fusage (const_rtx insn, enum rtx_code code, const_rtx datum)
1837
{
1838
  /* If it's not a CALL_INSN, it can't possibly have a
1839
     CALL_INSN_FUNCTION_USAGE field, so don't bother checking.  */
1840
  if (!CALL_P (insn))
1841
    return 0;
1842
 
1843
  gcc_assert (datum);
1844
 
1845
  if (!REG_P (datum))
1846
    {
1847
      rtx link;
1848
 
1849
      for (link = CALL_INSN_FUNCTION_USAGE (insn);
1850
           link;
1851
           link = XEXP (link, 1))
1852
        if (GET_CODE (XEXP (link, 0)) == code
1853
            && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))
1854
          return 1;
1855
    }
1856
  else
1857
    {
1858
      unsigned int regno = REGNO (datum);
1859
 
1860
      /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1861
         to pseudo registers, so don't bother checking.  */
1862
 
1863
      if (regno < FIRST_PSEUDO_REGISTER)
1864
        {
1865
          unsigned int end_regno = END_HARD_REGNO (datum);
1866
          unsigned int i;
1867
 
1868
          for (i = regno; i < end_regno; i++)
1869
            if (find_regno_fusage (insn, code, i))
1870
              return 1;
1871
        }
1872
    }
1873
 
1874
  return 0;
1875
}
1876
 
1877
/* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
1878
   in the CALL_INSN_FUNCTION_USAGE information of INSN.  */
1879
 
1880
int
1881
find_regno_fusage (const_rtx insn, enum rtx_code code, unsigned int regno)
1882
{
1883
  rtx link;
1884
 
1885
  /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1886
     to pseudo registers, so don't bother checking.  */
1887
 
1888
  if (regno >= FIRST_PSEUDO_REGISTER
1889
      || !CALL_P (insn) )
1890
    return 0;
1891
 
1892
  for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
1893
    {
1894
      rtx op, reg;
1895
 
1896
      if (GET_CODE (op = XEXP (link, 0)) == code
1897
          && REG_P (reg = XEXP (op, 0))
1898
          && REGNO (reg) <= regno
1899
          && END_HARD_REGNO (reg) > regno)
1900
        return 1;
1901
    }
1902
 
1903
  return 0;
1904
}
1905
 
1906
 
1907
/* Allocate a register note with kind KIND and datum DATUM.  LIST is
1908
   stored as the pointer to the next register note.  */
1909
 
1910
rtx
1911
alloc_reg_note (enum reg_note kind, rtx datum, rtx list)
1912
{
1913
  rtx note;
1914
 
1915
  switch (kind)
1916
    {
1917
    case REG_CC_SETTER:
1918
    case REG_CC_USER:
1919
    case REG_LABEL_TARGET:
1920
    case REG_LABEL_OPERAND:
1921
    case REG_TM:
1922
      /* These types of register notes use an INSN_LIST rather than an
1923
         EXPR_LIST, so that copying is done right and dumps look
1924
         better.  */
1925
      note = alloc_INSN_LIST (datum, list);
1926
      PUT_REG_NOTE_KIND (note, kind);
1927
      break;
1928
 
1929
    default:
1930
      note = alloc_EXPR_LIST (kind, datum, list);
1931
      break;
1932
    }
1933
 
1934
  return note;
1935
}
1936
 
1937
/* Add register note with kind KIND and datum DATUM to INSN.  */
1938
 
1939
void
1940
add_reg_note (rtx insn, enum reg_note kind, rtx datum)
1941
{
1942
  REG_NOTES (insn) = alloc_reg_note (kind, datum, REG_NOTES (insn));
1943
}
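
/* Illustrative sketch (editor's addition, not part of rtlanal.c): notes
   are normally attached and detached through add_reg_note and the
   remove_note function below, never by editing REG_NOTES by hand:

       add_reg_note (insn, REG_EQUAL, GEN_INT (42));
       ...
       remove_note (insn, find_reg_note (insn, REG_EQUAL, NULL_RTX));

   INSN is assumed to come from the surrounding pass; going through
   remove_note keeps the dataflow information rescanned when REG_EQUAL
   or REG_EQUIV notes disappear.  */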
1944
 
1945
/* Remove register note NOTE from the REG_NOTES of INSN.  */
1946
 
1947
void
1948
remove_note (rtx insn, const_rtx note)
1949
{
1950
  rtx link;
1951
 
1952
  if (note == NULL_RTX)
1953
    return;
1954
 
1955
  if (REG_NOTES (insn) == note)
1956
    REG_NOTES (insn) = XEXP (note, 1);
1957
  else
1958
    for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1959
      if (XEXP (link, 1) == note)
1960
        {
1961
          XEXP (link, 1) = XEXP (note, 1);
1962
          break;
1963
        }
1964
 
1965
  switch (REG_NOTE_KIND (note))
1966
    {
1967
    case REG_EQUAL:
1968
    case REG_EQUIV:
1969
      df_notes_rescan (insn);
1970
      break;
1971
    default:
1972
      break;
1973
    }
1974
}
1975
 
1976
/* Remove REG_EQUAL and/or REG_EQUIV notes if INSN has such notes.  */
1977
 
1978
void
1979
remove_reg_equal_equiv_notes (rtx insn)
1980
{
1981
  rtx *loc;
1982
 
1983
  loc = &REG_NOTES (insn);
1984
  while (*loc)
1985
    {
1986
      enum reg_note kind = REG_NOTE_KIND (*loc);
1987
      if (kind == REG_EQUAL || kind == REG_EQUIV)
1988
        *loc = XEXP (*loc, 1);
1989
      else
1990
        loc = &XEXP (*loc, 1);
1991
    }
1992
}
1993
 
1994
/* Remove all REG_EQUAL and REG_EQUIV notes referring to REGNO.  */
1995
 
1996
void
1997
remove_reg_equal_equiv_notes_for_regno (unsigned int regno)
1998
{
1999
  df_ref eq_use;
2000
 
2001
  if (!df)
2002
    return;
2003
 
2004
  /* This loop is a little tricky.  We cannot just go down the chain because
2005
     it is being modified by some actions in the loop.  So we just iterate
2006
     over the head.  We plan to drain the list anyway.  */
2007
  while ((eq_use = DF_REG_EQ_USE_CHAIN (regno)) != NULL)
2008
    {
2009
      rtx insn = DF_REF_INSN (eq_use);
2010
      rtx note = find_reg_equal_equiv_note (insn);
2011
 
2012
      /* This assert is generally triggered when someone deletes a REG_EQUAL
2013
         or REG_EQUIV note by hacking the list manually rather than calling
2014
         remove_note.  */
2015
      gcc_assert (note);
2016
 
2017
      remove_note (insn, note);
2018
    }
2019
}
2020
 
2021
/* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
2022
   return 1 if it is found.  A simple equality test is used to determine if
2023
   NODE matches.  */
2024
 
2025
int
2026
in_expr_list_p (const_rtx listp, const_rtx node)
2027
{
2028
  const_rtx x;
2029
 
2030
  for (x = listp; x; x = XEXP (x, 1))
2031
    if (node == XEXP (x, 0))
2032
      return 1;
2033
 
2034
  return 0;
2035
}
2036
 
2037
/* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
2038
   remove that entry from the list if it is found.
2039
 
2040
   A simple equality test is used to determine if NODE matches.  */
2041
 
2042
void
2043
remove_node_from_expr_list (const_rtx node, rtx *listp)
2044
{
2045
  rtx temp = *listp;
2046
  rtx prev = NULL_RTX;
2047
 
2048
  while (temp)
2049
    {
2050
      if (node == XEXP (temp, 0))
2051
        {
2052
          /* Splice the node out of the list.  */
2053
          if (prev)
2054
            XEXP (prev, 1) = XEXP (temp, 1);
2055
          else
2056
            *listp = XEXP (temp, 1);
2057
 
2058
          return;
2059
        }
2060
 
2061
      prev = temp;
2062
      temp = XEXP (temp, 1);
2063
    }
2064
}
2065
 
2066
/* Nonzero if X contains any volatile instructions.  These are instructions
2067
   which may cause unpredictable changes to machine state, and thus no
2068
   instructions should be moved or combined across them.  This includes
2069
   only volatile asms and UNSPEC_VOLATILE instructions.  */
2070
 
2071
int
2072
volatile_insn_p (const_rtx x)
2073
{
2074
  const RTX_CODE code = GET_CODE (x);
2075
  switch (code)
2076
    {
2077
    case LABEL_REF:
2078
    case SYMBOL_REF:
2079
    case CONST_INT:
2080
    case CONST:
2081
    case CONST_DOUBLE:
2082
    case CONST_FIXED:
2083
    case CONST_VECTOR:
2084
    case CC0:
2085
    case PC:
2086
    case REG:
2087
    case SCRATCH:
2088
    case CLOBBER:
2089
    case ADDR_VEC:
2090
    case ADDR_DIFF_VEC:
2091
    case CALL:
2092
    case MEM:
2093
      return 0;
2094
 
2095
    case UNSPEC_VOLATILE:
2096
 /* case TRAP_IF: This isn't clear yet.  */
2097
      return 1;
2098
 
2099
    case ASM_INPUT:
2100
    case ASM_OPERANDS:
2101
      if (MEM_VOLATILE_P (x))
2102
        return 1;
2103
 
2104
    default:
2105
      break;
2106
    }
2107
 
2108
  /* Recursively scan the operands of this expression.  */
2109
 
2110
  {
2111
    const char *const fmt = GET_RTX_FORMAT (code);
2112
    int i;
2113
 
2114
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2115
      {
2116
        if (fmt[i] == 'e')
2117
          {
2118
            if (volatile_insn_p (XEXP (x, i)))
2119
              return 1;
2120
          }
2121
        else if (fmt[i] == 'E')
2122
          {
2123
            int j;
2124
            for (j = 0; j < XVECLEN (x, i); j++)
2125
              if (volatile_insn_p (XVECEXP (x, i, j)))
2126
                return 1;
2127
          }
2128
      }
2129
  }
2130
  return 0;
2131
}
2132
 
2133
/* Nonzero if X contains any volatile memory references,
2134
   UNSPEC_VOLATILE operations or volatile ASM_OPERANDS expressions.  */
2135
 
2136
int
2137
volatile_refs_p (const_rtx x)
2138
{
2139
  const RTX_CODE code = GET_CODE (x);
2140
  switch (code)
2141
    {
2142
    case LABEL_REF:
2143
    case SYMBOL_REF:
2144
    case CONST_INT:
2145
    case CONST:
2146
    case CONST_DOUBLE:
2147
    case CONST_FIXED:
2148
    case CONST_VECTOR:
2149
    case CC0:
2150
    case PC:
2151
    case REG:
2152
    case SCRATCH:
2153
    case CLOBBER:
2154
    case ADDR_VEC:
2155
    case ADDR_DIFF_VEC:
2156
      return 0;
2157
 
2158
    case UNSPEC_VOLATILE:
2159
      return 1;
2160
 
2161
    case MEM:
2162
    case ASM_INPUT:
2163
    case ASM_OPERANDS:
2164
      if (MEM_VOLATILE_P (x))
2165
        return 1;
2166
 
2167
    default:
2168
      break;
2169
    }
2170
 
2171
  /* Recursively scan the operands of this expression.  */
2172
 
2173
  {
2174
    const char *const fmt = GET_RTX_FORMAT (code);
2175
    int i;
2176
 
2177
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2178
      {
2179
        if (fmt[i] == 'e')
2180
          {
2181
            if (volatile_refs_p (XEXP (x, i)))
2182
              return 1;
2183
          }
2184
        else if (fmt[i] == 'E')
2185
          {
2186
            int j;
2187
            for (j = 0; j < XVECLEN (x, i); j++)
2188
              if (volatile_refs_p (XVECEXP (x, i, j)))
2189
                return 1;
2190
          }
2191
      }
2192
  }
2193
  return 0;
2194
}
2195
 
2196
/* Similar to above, except that it also rejects register pre- and post-
2197
   incrementing.  */
2198
 
2199
int
2200
side_effects_p (const_rtx x)
2201
{
2202
  const RTX_CODE code = GET_CODE (x);
2203
  switch (code)
2204
    {
2205
    case LABEL_REF:
2206
    case SYMBOL_REF:
2207
    case CONST_INT:
2208
    case CONST:
2209
    case CONST_DOUBLE:
2210
    case CONST_FIXED:
2211
    case CONST_VECTOR:
2212
    case CC0:
2213
    case PC:
2214
    case REG:
2215
    case SCRATCH:
2216
    case ADDR_VEC:
2217
    case ADDR_DIFF_VEC:
2218
    case VAR_LOCATION:
2219
      return 0;
2220
 
2221
    case CLOBBER:
2222
      /* Reject CLOBBER with a non-VOID mode.  These are made by combine.c
2223
         when some combination can't be done.  If we see one, don't think
2224
         that we can simplify the expression.  */
2225
      return (GET_MODE (x) != VOIDmode);
2226
 
2227
    case PRE_INC:
2228
    case PRE_DEC:
2229
    case POST_INC:
2230
    case POST_DEC:
2231
    case PRE_MODIFY:
2232
    case POST_MODIFY:
2233
    case CALL:
2234
    case UNSPEC_VOLATILE:
2235
 /* case TRAP_IF: This isn't clear yet.  */
2236
      return 1;
2237
 
2238
    case MEM:
2239
    case ASM_INPUT:
2240
    case ASM_OPERANDS:
2241
      if (MEM_VOLATILE_P (x))
2242
        return 1;
2243
 
2244
    default:
2245
      break;
2246
    }
2247
 
2248
  /* Recursively scan the operands of this expression.  */
2249
 
2250
  {
2251
    const char *fmt = GET_RTX_FORMAT (code);
2252
    int i;
2253
 
2254
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2255
      {
2256
        if (fmt[i] == 'e')
2257
          {
2258
            if (side_effects_p (XEXP (x, i)))
2259
              return 1;
2260
          }
2261
        else if (fmt[i] == 'E')
2262
          {
2263
            int j;
2264
            for (j = 0; j < XVECLEN (x, i); j++)
2265
              if (side_effects_p (XVECEXP (x, i, j)))
2266
                return 1;
2267
          }
2268
      }
2269
  }
2270
  return 0;
2271
}
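
/* Illustrative sketch (editor's addition, not part of rtlanal.c): the
   three predicates above form a hierarchy.  Assuming the usual pass
   context, for

       rtx m = gen_rtx_MEM (SImode, stack_pointer_rtx);
       MEM_VOLATILE_P (m) = 1;

   volatile_insn_p (m) is 0 (a MEM is never a volatile asm or an
   UNSPEC_VOLATILE), volatile_refs_p (m) is 1 (a volatile memory
   reference), and side_effects_p (m) is also 1.  Wrapping the address
   in a POST_INC would make side_effects_p return 1 even for a
   non-volatile MEM, because it additionally rejects autoincrement.  */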
2272
 
2273
/* Return nonzero if evaluating rtx X might cause a trap.
2274
   FLAGS controls how to consider MEMs.  A nonzero value means the context
2275
   of the access may have changed from the original, such that the
2276
   address may have become invalid.  */
2277
 
2278
int
2279
may_trap_p_1 (const_rtx x, unsigned flags)
2280
{
2281
  int i;
2282
  enum rtx_code code;
2283
  const char *fmt;
2284
 
2285
  /* We make no distinction currently, but this function is part of
2286
     the internal target-hooks ABI so we keep the parameter as
2287
     "unsigned flags".  */
2288
  bool code_changed = flags != 0;
2289
 
2290
  if (x == 0)
2291
    return 0;
2292
  code = GET_CODE (x);
2293
  switch (code)
2294
    {
2295
      /* Handle these cases quickly.  */
2296
    case CONST_INT:
2297
    case CONST_DOUBLE:
2298
    case CONST_FIXED:
2299
    case CONST_VECTOR:
2300
    case SYMBOL_REF:
2301
    case LABEL_REF:
2302
    case CONST:
2303
    case PC:
2304
    case CC0:
2305
    case REG:
2306
    case SCRATCH:
2307
      return 0;
2308
 
2309
    case UNSPEC:
2310
    case UNSPEC_VOLATILE:
2311
      return targetm.unspec_may_trap_p (x, flags);
2312
 
2313
    case ASM_INPUT:
2314
    case TRAP_IF:
2315
      return 1;
2316
 
2317
    case ASM_OPERANDS:
2318
      return MEM_VOLATILE_P (x);
2319
 
2320
      /* Memory ref can trap unless it's a static var or a stack slot.  */
2321
    case MEM:
2322
      /* Recognize specific pattern of stack checking probes.  */
2323
      if (flag_stack_check
2324
          && MEM_VOLATILE_P (x)
2325
          && XEXP (x, 0) == stack_pointer_rtx)
2326
        return 1;
2327
      if (/* MEM_NOTRAP_P only relates to the actual position of the memory
2328
             reference; moving it out of context such as when moving code
2329
             when optimizing, might cause its address to become invalid.  */
2330
          code_changed
2331
          || !MEM_NOTRAP_P (x))
2332
        {
2333
          HOST_WIDE_INT size = MEM_SIZE_KNOWN_P (x) ? MEM_SIZE (x) : 0;
2334
          return rtx_addr_can_trap_p_1 (XEXP (x, 0), 0, size,
2335
                                        GET_MODE (x), code_changed);
2336
        }
2337
 
2338
      return 0;
2339
 
2340
      /* Division by a non-constant might trap.  */
2341
    case DIV:
2342
    case MOD:
2343
    case UDIV:
2344
    case UMOD:
2345
      if (HONOR_SNANS (GET_MODE (x)))
2346
        return 1;
2347
      if (SCALAR_FLOAT_MODE_P (GET_MODE (x)))
2348
        return flag_trapping_math;
2349
      if (!CONSTANT_P (XEXP (x, 1)) || (XEXP (x, 1) == const0_rtx))
2350
        return 1;
2351
      break;
2352
 
2353
    case EXPR_LIST:
2354
      /* An EXPR_LIST is used to represent a function call.  This
2355
         certainly may trap.  */
2356
      return 1;
2357
 
2358
    case GE:
2359
    case GT:
2360
    case LE:
2361
    case LT:
2362
    case LTGT:
2363
    case COMPARE:
2364
      /* Some floating point comparisons may trap.  */
2365
      if (!flag_trapping_math)
2366
        break;
2367
      /* ??? There is no machine independent way to check for tests that trap
2368
         when COMPARE is used, though many targets do make this distinction.
2369
         For instance, sparc uses CCFPE for compares which generate exceptions
2370
         and CCFP for compares which do not generate exceptions.  */
2371
      if (HONOR_NANS (GET_MODE (x)))
2372
        return 1;
2373
      /* But often the compare has some CC mode, so check operand
2374
         modes as well.  */
2375
      if (HONOR_NANS (GET_MODE (XEXP (x, 0)))
2376
          || HONOR_NANS (GET_MODE (XEXP (x, 1))))
2377
        return 1;
2378
      break;
2379
 
2380
    case EQ:
2381
    case NE:
2382
      if (HONOR_SNANS (GET_MODE (x)))
2383
        return 1;
2384
      /* Often comparison is CC mode, so check operand modes.  */
2385
      if (HONOR_SNANS (GET_MODE (XEXP (x, 0)))
2386
          || HONOR_SNANS (GET_MODE (XEXP (x, 1))))
2387
        return 1;
2388
      break;
2389
 
2390
    case FIX:
2391
      /* Conversion of floating point might trap.  */
2392
      if (flag_trapping_math && HONOR_NANS (GET_MODE (XEXP (x, 0))))
2393
        return 1;
2394
      break;
2395
 
2396
    case NEG:
2397
    case ABS:
2398
    case SUBREG:
2399
      /* These operations don't trap even with floating point.  */
2400
      break;
2401
 
2402
    default:
2403
      /* Any floating arithmetic may trap.  */
2404
      if (SCALAR_FLOAT_MODE_P (GET_MODE (x))
2405
          && flag_trapping_math)
2406
        return 1;
2407
    }
2408
 
2409
  fmt = GET_RTX_FORMAT (code);
2410
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2411
    {
2412
      if (fmt[i] == 'e')
2413
        {
2414
          if (may_trap_p_1 (XEXP (x, i), flags))
2415
            return 1;
2416
        }
2417
      else if (fmt[i] == 'E')
2418
        {
2419
          int j;
2420
          for (j = 0; j < XVECLEN (x, i); j++)
2421
            if (may_trap_p_1 (XVECEXP (x, i, j), flags))
2422
              return 1;
2423
        }
2424
    }
2425
  return 0;
2426
}
2427
 
2428
/* Return nonzero if evaluating rtx X might cause a trap.  */
2429
 
2430
int
2431
may_trap_p (const_rtx x)
2432
{
2433
  return may_trap_p_1 (x, 0);
2434
}
2435
 
2436
/* Same as above, but additionally return nonzero if evaluating rtx X might
2437
   cause a fault.  We define a fault for the purpose of this function as an
2438
   erroneous execution condition that cannot be encountered during the normal
2439
   execution of a valid program; the typical example is an unaligned memory
2440
   access on a strict alignment machine.  The compiler guarantees that it
2441
   doesn't generate code that will fault from a valid program, but this
2442
   guarantee doesn't mean anything for individual instructions.  Consider
2443
   the following example:
2444
 
2445
      struct S { int d; union { char *cp; int *ip; }; };
2446
 
2447
      int foo(struct S *s)
2448
      {
2449
        if (s->d == 1)
2450
          return *s->ip;
2451
        else
2452
          return *s->cp;
2453
      }
2454
 
2455
   on a strict alignment machine.  In a valid program, foo will never be
2456
   invoked on a structure for which d is equal to 1 and the underlying
2457
   unique field of the union not aligned on a 4-byte boundary, but the
2458
   expression *s->ip might cause a fault if considered individually.
2459
 
2460
   At the RTL level, potentially problematic expressions will almost always
2461
   verify may_trap_p; for example, the above dereference can be emitted as
2462
   (mem:SI (reg:P)) and this expression is may_trap_p for a generic register.
2463
   However, suppose that foo is inlined in a caller that causes s->cp to
2464
   point to a local character variable and guarantees that s->d is not set
2465
   to 1; foo may have been effectively translated into pseudo-RTL as:
2466
 
2467
      if ((reg:SI) == 1)
2468
        (set (reg:SI) (mem:SI (%fp - 7)))
2469
      else
2470
        (set (reg:QI) (mem:QI (%fp - 7)))
2471
 
2472
   Now (mem:SI (%fp - 7)) is considered as not may_trap_p since it is a
2473
   memory reference to a stack slot, but it will certainly cause a fault
2474
   on a strict alignment machine.  */
2475
 
2476
int
2477
may_trap_or_fault_p (const_rtx x)
2478
{
2479
  return may_trap_p_1 (x, 1);
2480
}
2481
 
2482
/* Return nonzero if X contains a comparison that is not either EQ or NE,
2483
   i.e., an inequality.  */
2484
 
2485
int
2486
inequality_comparisons_p (const_rtx x)
2487
{
2488
  const char *fmt;
2489
  int len, i;
2490
  const enum rtx_code code = GET_CODE (x);
2491
 
2492
  switch (code)
2493
    {
2494
    case REG:
2495
    case SCRATCH:
2496
    case PC:
2497
    case CC0:
2498
    case CONST_INT:
2499
    case CONST_DOUBLE:
2500
    case CONST_FIXED:
2501
    case CONST_VECTOR:
2502
    case CONST:
2503
    case LABEL_REF:
2504
    case SYMBOL_REF:
2505
      return 0;
2506
 
2507
    case LT:
2508
    case LTU:
2509
    case GT:
2510
    case GTU:
2511
    case LE:
2512
    case LEU:
2513
    case GE:
2514
    case GEU:
2515
      return 1;
2516
 
2517
    default:
2518
      break;
2519
    }
2520
 
2521
  len = GET_RTX_LENGTH (code);
2522
  fmt = GET_RTX_FORMAT (code);
2523
 
2524
  for (i = 0; i < len; i++)
2525
    {
2526
      if (fmt[i] == 'e')
2527
        {
2528
          if (inequality_comparisons_p (XEXP (x, i)))
2529
            return 1;
2530
        }
2531
      else if (fmt[i] == 'E')
2532
        {
2533
          int j;
2534
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2535
            if (inequality_comparisons_p (XVECEXP (x, i, j)))
2536
              return 1;
2537
        }
2538
    }
2539
 
2540
  return 0;
2541
}
2542
 
2543
/* Replace any occurrence of FROM in X with TO.  The function does
2544
   not enter into CONST_DOUBLE for the replace.
2545
 
2546
   Note that copying is not done so X must not be shared unless all copies
2547
   are to be modified.  */
2548
 
2549
rtx
2550
replace_rtx (rtx x, rtx from, rtx to)
2551
{
2552
  int i, j;
2553
  const char *fmt;
2554
 
2555
  /* The following prevents loops from occurring when we change a MEM in
2556
     a CONST_DOUBLE into the same CONST_DOUBLE.  */
2557
  if (x != 0 && GET_CODE (x) == CONST_DOUBLE)
2558
    return x;
2559
 
2560
  if (x == from)
2561
    return to;
2562
 
2563
  /* Allow this function to make replacements in EXPR_LISTs.  */
2564
  if (x == 0)
2565
    return 0;
2566
 
2567
  if (GET_CODE (x) == SUBREG)
2568
    {
2569
      rtx new_rtx = replace_rtx (SUBREG_REG (x), from, to);
2570
 
2571
      if (CONST_INT_P (new_rtx))
2572
        {
2573
          x = simplify_subreg (GET_MODE (x), new_rtx,
2574
                               GET_MODE (SUBREG_REG (x)),
2575
                               SUBREG_BYTE (x));
2576
          gcc_assert (x);
2577
        }
2578
      else
2579
        SUBREG_REG (x) = new_rtx;
2580
 
2581
      return x;
2582
    }
2583
  else if (GET_CODE (x) == ZERO_EXTEND)
2584
    {
2585
      rtx new_rtx = replace_rtx (XEXP (x, 0), from, to);
2586
 
2587
      if (CONST_INT_P (new_rtx))
2588
        {
2589
          x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
2590
                                        new_rtx, GET_MODE (XEXP (x, 0)));
2591
          gcc_assert (x);
2592
        }
2593
      else
2594
        XEXP (x, 0) = new_rtx;
2595
 
2596
      return x;
2597
    }
2598
 
2599
  fmt = GET_RTX_FORMAT (GET_CODE (x));
2600
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2601
    {
2602
      if (fmt[i] == 'e')
2603
        XEXP (x, i) = replace_rtx (XEXP (x, i), from, to);
2604
      else if (fmt[i] == 'E')
2605
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2606
          XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
2607
    }
2608
 
2609
  return x;
2610
}
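
/* Illustrative sketch (editor's addition, not part of rtlanal.c): since
   replace_rtx modifies X in place, callers usually copy first unless
   every sharer of X should see the change:

       rtx pat = copy_rtx (PATTERN (insn));
       pat = replace_rtx (pat, old_reg, new_reg);

   OLD_REG and NEW_REG stand for whatever rtxes the surrounding pass is
   substituting; the return value must be used because X itself is
   replaced when it equals FROM.  */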
2611
 
2612
/* Replace occurrences of the old label in *X with the new one.
2613
   DATA is a REPLACE_LABEL_DATA containing the old and new labels.  */
2614
 
2615
int
2616
replace_label (rtx *x, void *data)
2617
{
2618
  rtx l = *x;
2619
  rtx old_label = ((replace_label_data *) data)->r1;
2620
  rtx new_label = ((replace_label_data *) data)->r2;
2621
  bool update_label_nuses = ((replace_label_data *) data)->update_label_nuses;
2622
 
2623
  if (l == NULL_RTX)
2624
    return 0;
2625
 
2626
  if (GET_CODE (l) == SYMBOL_REF
2627
      && CONSTANT_POOL_ADDRESS_P (l))
2628
    {
2629
      rtx c = get_pool_constant (l);
2630
      if (rtx_referenced_p (old_label, c))
2631
        {
2632
          rtx new_c, new_l;
2633
          replace_label_data *d = (replace_label_data *) data;
2634
 
2635
          /* Create a copy of constant C; replace the label inside
2636
             but do not update LABEL_NUSES because uses in constant pool
2637
             are not counted.  */
2638
          new_c = copy_rtx (c);
2639
          d->update_label_nuses = false;
2640
          for_each_rtx (&new_c, replace_label, data);
2641
          d->update_label_nuses = update_label_nuses;
2642
 
2643
          /* Add the new constant NEW_C to constant pool and replace
2644
             the old reference to constant by new reference.  */
2645
          new_l = XEXP (force_const_mem (get_pool_mode (l), new_c), 0);
2646
          *x = replace_rtx (l, l, new_l);
2647
        }
2648
      return 0;
2649
    }
2650
 
2651
  /* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL
2652
     field.  This is not handled by for_each_rtx because it doesn't
2653
     handle unprinted ('0') fields.  */
2654
  if (JUMP_P (l) && JUMP_LABEL (l) == old_label)
2655
    JUMP_LABEL (l) = new_label;
2656
 
2657
  if ((GET_CODE (l) == LABEL_REF
2658
       || GET_CODE (l) == INSN_LIST)
2659
      && XEXP (l, 0) == old_label)
2660
    {
2661
      XEXP (l, 0) = new_label;
2662
      if (update_label_nuses)
2663
        {
2664
          ++LABEL_NUSES (new_label);
2665
          --LABEL_NUSES (old_label);
2666
        }
2667
      return 0;
2668
    }
2669
 
2670
  return 0;
2671
}
2672
 
2673
/* When *BODY is equal to X or X is directly referenced by *BODY
2674
   return nonzero, thus FOR_EACH_RTX stops traversing and returns nonzero
2675
   too, otherwise FOR_EACH_RTX continues traversing *BODY.  */
2676
 
2677
static int
2678
rtx_referenced_p_1 (rtx *body, void *x)
2679
{
2680
  rtx y = (rtx) x;
2681
 
2682
  if (*body == NULL_RTX)
2683
    return y == NULL_RTX;
2684
 
2685
  /* Return true if a label_ref *BODY refers to label Y.  */
2686
  if (GET_CODE (*body) == LABEL_REF && LABEL_P (y))
2687
    return XEXP (*body, 0) == y;
2688
 
2689
  /* If *BODY is a reference to pool constant traverse the constant.  */
2690
  if (GET_CODE (*body) == SYMBOL_REF
2691
      && CONSTANT_POOL_ADDRESS_P (*body))
2692
    return rtx_referenced_p (y, get_pool_constant (*body));
2693
 
2694
  /* By default, compare the RTL expressions.  */
2695
  return rtx_equal_p (*body, y);
2696
}
2697
 
2698
/* Return true if X is referenced in BODY.  */
2699
 
2700
int
2701
rtx_referenced_p (rtx x, rtx body)
2702
{
2703
  return for_each_rtx (&body, rtx_referenced_p_1, x);
2704
}
2705
 
2706
/* If INSN is a tablejump return true and store the label (before jump table) to
2707
   *LABELP and the jump table to *TABLEP.  LABELP and TABLEP may be NULL.  */
2708
 
2709
bool
2710
tablejump_p (const_rtx insn, rtx *labelp, rtx *tablep)
2711
{
2712
  rtx label, table;
2713
 
2714
  if (!JUMP_P (insn))
2715
    return false;
2716
 
2717
  label = JUMP_LABEL (insn);
2718
  if (label != NULL_RTX && !ANY_RETURN_P (label)
2719
      && (table = next_active_insn (label)) != NULL_RTX
2720
      && JUMP_TABLE_DATA_P (table))
2721
    {
2722
      if (labelp)
2723
        *labelp = label;
2724
      if (tablep)
2725
        *tablep = table;
2726
      return true;
2727
    }
2728
  return false;
2729
}
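
/* Illustrative sketch (editor's addition, not part of rtlanal.c): a pass
   that walks the targets of a dispatch table might use tablejump_p as
   follows, assuming INSN comes from the surrounding pass:

       rtx label, table, vec;
       int i, idx;

       if (tablejump_p (insn, &label, &table))
         {
           vec = PATTERN (table);
           idx = GET_CODE (vec) == ADDR_DIFF_VEC ? 1 : 0;
           for (i = 0; i < XVECLEN (vec, idx); i++)
             ... XEXP (XVECEXP (vec, idx, i), 0) is one target label ...
         }

   ADDR_DIFF_VEC keeps its label refs in operand 1 and ADDR_VEC in
   operand 0, which is why IDX is chosen that way.  */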
2730
 
2731
/* A subroutine of computed_jump_p, return 1 if X contains a REG or MEM or
2732
   constant that is not in the constant pool and not in the condition
2733
   of an IF_THEN_ELSE.  */
2734
 
2735
static int
2736
computed_jump_p_1 (const_rtx x)
2737
{
2738
  const enum rtx_code code = GET_CODE (x);
2739
  int i, j;
2740
  const char *fmt;
2741
 
2742
  switch (code)
2743
    {
2744
    case LABEL_REF:
2745
    case PC:
2746
      return 0;
2747
 
2748
    case CONST:
2749
    case CONST_INT:
2750
    case CONST_DOUBLE:
2751
    case CONST_FIXED:
2752
    case CONST_VECTOR:
2753
    case SYMBOL_REF:
2754
    case REG:
2755
      return 1;
2756
 
2757
    case MEM:
2758
      return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
2759
                && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));
2760
 
2761
    case IF_THEN_ELSE:
2762
      return (computed_jump_p_1 (XEXP (x, 1))
2763
              || computed_jump_p_1 (XEXP (x, 2)));
2764
 
2765
    default:
2766
      break;
2767
    }
2768
 
2769
  fmt = GET_RTX_FORMAT (code);
2770
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2771
    {
2772
      if (fmt[i] == 'e'
2773
          && computed_jump_p_1 (XEXP (x, i)))
2774
        return 1;
2775
 
2776
      else if (fmt[i] == 'E')
2777
        for (j = 0; j < XVECLEN (x, i); j++)
2778
          if (computed_jump_p_1 (XVECEXP (x, i, j)))
2779
            return 1;
2780
    }
2781
 
2782
  return 0;
2783
}
2784
 
2785
/* Return nonzero if INSN is an indirect jump (aka computed jump).
2786
 
2787
   Tablejumps and casesi insns are not considered indirect jumps;
2788
   we can recognize them by a (use (label_ref)).  */
2789
 
2790
int
2791
computed_jump_p (const_rtx insn)
2792
{
2793
  int i;
2794
  if (JUMP_P (insn))
2795
    {
2796
      rtx pat = PATTERN (insn);
2797
 
2798
      /* If we have a JUMP_LABEL set, we're not a computed jump.  */
2799
      if (JUMP_LABEL (insn) != NULL)
2800
        return 0;
2801
 
2802
      if (GET_CODE (pat) == PARALLEL)
2803
        {
2804
          int len = XVECLEN (pat, 0);
2805
          int has_use_labelref = 0;
2806
 
2807
          for (i = len - 1; i >= 0; i--)
2808
            if (GET_CODE (XVECEXP (pat, 0, i)) == USE
2809
                && (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
2810
                    == LABEL_REF))
2811
              has_use_labelref = 1;
2812
 
2813
          if (! has_use_labelref)
2814
            for (i = len - 1; i >= 0; i--)
2815
              if (GET_CODE (XVECEXP (pat, 0, i)) == SET
2816
                  && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
2817
                  && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))
2818
                return 1;
2819
        }
2820
      else if (GET_CODE (pat) == SET
2821
               && SET_DEST (pat) == pc_rtx
2822
               && computed_jump_p_1 (SET_SRC (pat)))
2823
        return 1;
2824
    }
2825
  return 0;
2826
}
2827
 
2828
/* Optimized loop of for_each_rtx, trying to avoid useless recursive
2829
   calls.  Processes the subexpressions of EXP and passes them to F.  */
2830
static int
2831
for_each_rtx_1 (rtx exp, int n, rtx_function f, void *data)
2832
{
2833
  int result, i, j;
2834
  const char *format = GET_RTX_FORMAT (GET_CODE (exp));
2835
  rtx *x;
2836
 
2837
  for (; format[n] != '\0'; n++)
2838
    {
2839
      switch (format[n])
2840
        {
2841
        case 'e':
2842
          /* Call F on X.  */
2843
          x = &XEXP (exp, n);
2844
          result = (*f) (x, data);
2845
          if (result == -1)
2846
            /* Do not traverse sub-expressions.  */
2847
            continue;
2848
          else if (result != 0)
2849
            /* Stop the traversal.  */
2850
            return result;
2851
 
2852
          if (*x == NULL_RTX)
2853
            /* There are no sub-expressions.  */
2854
            continue;
2855
 
2856
          i = non_rtx_starting_operands[GET_CODE (*x)];
2857
          if (i >= 0)
2858
            {
2859
              result = for_each_rtx_1 (*x, i, f, data);
2860
              if (result != 0)
2861
                return result;
2862
            }
2863
          break;
2864
 
2865
        case 'V':
2866
        case 'E':
2867
          if (XVEC (exp, n) == 0)
2868
            continue;
2869
          for (j = 0; j < XVECLEN (exp, n); ++j)
2870
            {
2871
              /* Call F on X.  */
2872
              x = &XVECEXP (exp, n, j);
2873
              result = (*f) (x, data);
2874
              if (result == -1)
2875
                /* Do not traverse sub-expressions.  */
2876
                continue;
2877
              else if (result != 0)
2878
                /* Stop the traversal.  */
2879
                return result;
2880
 
2881
              if (*x == NULL_RTX)
2882
                /* There are no sub-expressions.  */
2883
                continue;
2884
 
2885
              i = non_rtx_starting_operands[GET_CODE (*x)];
2886
              if (i >= 0)
2887
                {
2888
                  result = for_each_rtx_1 (*x, i, f, data);
2889
                  if (result != 0)
2890
                    return result;
2891
                }
2892
            }
2893
          break;
2894
 
2895
        default:
2896
          /* Nothing to do.  */
2897
          break;
2898
        }
2899
    }
2900
 
2901
  return 0;
2902
}
2903
 
2904
/* Traverse X via depth-first search, calling F for each
2905
   sub-expression (including X itself).  F is also passed the DATA.
2906
   If F returns -1, do not traverse sub-expressions, but continue
2907
   traversing the rest of the tree.  If F ever returns any other
2908
   nonzero value, stop the traversal, and return the value returned
2909
   by F.  Otherwise, return 0.  This function does not traverse inside
2910
   tree structure that contains RTX_EXPRs, or into sub-expressions
2911
   whose format code is `0' since it is not known whether or not those
2912
   codes are actually RTL.
2913
 
2914
   This routine is very general, and could (should?) be used to
2915
   implement many of the other routines in this file.  */
2916
 
2917
int
2918
for_each_rtx (rtx *x, rtx_function f, void *data)
2919
{
2920
  int result;
2921
  int i;
2922
 
2923
  /* Call F on X.  */
2924
  result = (*f) (x, data);
2925
  if (result == -1)
2926
    /* Do not traverse sub-expressions.  */
2927
    return 0;
2928
  else if (result != 0)
2929
    /* Stop the traversal.  */
2930
    return result;
2931
 
2932
  if (*x == NULL_RTX)
2933
    /* There are no sub-expressions.  */
2934
    return 0;
2935
 
2936
  i = non_rtx_starting_operands[GET_CODE (*x)];
2937
  if (i < 0)
2938
    return 0;
2939
 
2940
  return for_each_rtx_1 (*x, i, f, data);
2941
}
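
/* Illustrative sketch (editor's addition, not part of rtlanal.c): a
   for_each_rtx callback receives a pointer to each sub-rtx plus the
   opaque DATA argument.  Counting the MEMs in an insn pattern could
   look like this:

       static int
       count_mems_1 (rtx *x, void *data)
       {
         if (*x != NULL_RTX && MEM_P (*x))
           {
             ++*(int *) data;
             return -1;
           }
         return 0;
       }

       int n = 0;
       for_each_rtx (&PATTERN (insn), count_mems_1, &n);

   Returning -1 skips the sub-expressions of each MEM, so a MEM nested
   inside another MEM's address is not counted; return 0 there instead
   if nested occurrences matter.  */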
2942
 
2943
 
2944
 
2945
/* Data structure that holds the internal state communicated between
2946
   for_each_inc_dec, for_each_inc_dec_find_mem and
2947
   for_each_inc_dec_find_inc_dec.  */
2948
 
2949
struct for_each_inc_dec_ops {
2950
  /* The function to be called for each autoinc operation found.  */
2951
  for_each_inc_dec_fn fn;
2952
  /* The opaque argument to be passed to it.  */
2953
  void *arg;
2954
  /* The MEM we're visiting, if any.  */
2955
  rtx mem;
2956
};
2957
 
2958
static int for_each_inc_dec_find_mem (rtx *r, void *d);
2959
 
2960
/* Find PRE/POST-INC/DEC/MODIFY operations within *R, extract the
2961
   operands of the equivalent add insn and pass the result to the
2962
   operator specified by *D.  */
2963
 
2964
static int
2965
for_each_inc_dec_find_inc_dec (rtx *r, void *d)
2966
{
2967
  rtx x = *r;
2968
  struct for_each_inc_dec_ops *data = (struct for_each_inc_dec_ops *)d;
2969
 
2970
  switch (GET_CODE (x))
2971
    {
2972
    case PRE_INC:
2973
    case POST_INC:
2974
      {
2975
        int size = GET_MODE_SIZE (GET_MODE (data->mem));
2976
        rtx r1 = XEXP (x, 0);
2977
        rtx c = gen_int_mode (size, GET_MODE (r1));
2978
        return data->fn (data->mem, x, r1, r1, c, data->arg);
2979
      }
2980
 
2981
    case PRE_DEC:
2982
    case POST_DEC:
2983
      {
2984
        int size = GET_MODE_SIZE (GET_MODE (data->mem));
2985
        rtx r1 = XEXP (x, 0);
2986
        rtx c = gen_int_mode (-size, GET_MODE (r1));
2987
        return data->fn (data->mem, x, r1, r1, c, data->arg);
2988
      }
2989
 
2990
    case PRE_MODIFY:
2991
    case POST_MODIFY:
2992
      {
2993
        rtx r1 = XEXP (x, 0);
2994
        rtx add = XEXP (x, 1);
2995
        return data->fn (data->mem, x, r1, add, NULL, data->arg);
2996
      }
2997
 
2998
    case MEM:
2999
      {
3000
        rtx save = data->mem;
3001
        int ret = for_each_inc_dec_find_mem (r, d);
3002
        data->mem = save;
3003
        return ret;
3004
      }
3005
 
3006
    default:
3007
      return 0;
3008
    }
3009
}
3010
 
3011
/* If *R is a MEM, find PRE/POST-INC/DEC/MODIFY operations within its
3012
   address, extract the operands of the equivalent add insn and pass
3013
   the result to the operator specified by *D.  */
3014
 
3015
static int
3016
for_each_inc_dec_find_mem (rtx *r, void *d)
3017
{
3018
  rtx x = *r;
3019
  if (x != NULL_RTX && MEM_P (x))
3020
    {
3021
      struct for_each_inc_dec_ops *data = (struct for_each_inc_dec_ops *) d;
3022
      int result;
3023
 
3024
      data->mem = x;
3025
 
3026
      result = for_each_rtx (&XEXP (x, 0), for_each_inc_dec_find_inc_dec,
3027
                             data);
3028
      if (result)
3029
        return result;
3030
 
3031
      return -1;
3032
    }
3033
  return 0;
3034
}
3035
 
3036
/* Traverse *X looking for MEMs, and for autoinc operations within
3037
   them.  For each such autoinc operation found, call FN, passing it
3038
   the innermost enclosing MEM, the operation itself, the RTX modified
3039
   by the operation, two RTXs (the second may be NULL) that, once
3040
   added, represent the value to be held by the modified RTX
3041
   afterwards, and ARG.  FN is to return -1 to skip looking for other
3042
   autoinc operations within the visited operation, 0 to continue the
3043
   traversal, or any other value to have it returned to the caller of
3044
   for_each_inc_dec.  */
3045
 
3046
int
3047
for_each_inc_dec (rtx *x,
3048
                  for_each_inc_dec_fn fn,
3049
                  void *arg)
3050
{
3051
  struct for_each_inc_dec_ops data;
3052
 
3053
  data.fn = fn;
3054
  data.arg = arg;
3055
  data.mem = NULL;
3056
 
3057
  return for_each_rtx (x, for_each_inc_dec_find_mem, &data);
3058
}
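
/* Illustrative sketch (editor's addition, not part of rtlanal.c): the
   callback receives the six arguments passed by
   for_each_inc_dec_find_inc_dec above (the enclosing MEM, the autoinc
   rtx, the register modified, the two addends, and the opaque
   argument); the exact parameter names are those of the
   for_each_inc_dec_fn typedef in rtl.h.  Counting autoinc addresses in
   a pattern could look like this:

       static int
       note_autoinc (rtx mem ATTRIBUTE_UNUSED, rtx op ATTRIBUTE_UNUSED,
                     rtx dest ATTRIBUTE_UNUSED, rtx src ATTRIBUTE_UNUSED,
                     rtx srcoff ATTRIBUTE_UNUSED, void *arg)
       {
         ++*(int *) arg;
         return 0;
       }

       int count = 0;
       for_each_inc_dec (&PATTERN (insn), note_autoinc, &count);

   Returning 0 keeps the traversal going; -1 and other nonzero values
   behave as described in the comment above.  */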
3059
 
3060
 
3061
/* Searches X for any reference to REGNO, returning the rtx of the
3062
   reference found if any.  Otherwise, returns NULL_RTX.  */
3063
 
3064
rtx
3065
regno_use_in (unsigned int regno, rtx x)
3066
{
3067
  const char *fmt;
3068
  int i, j;
3069
  rtx tem;
3070
 
3071
  if (REG_P (x) && REGNO (x) == regno)
3072
    return x;
3073
 
3074
  fmt = GET_RTX_FORMAT (GET_CODE (x));
3075
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3076
    {
3077
      if (fmt[i] == 'e')
3078
        {
3079
          if ((tem = regno_use_in (regno, XEXP (x, i))))
3080
            return tem;
3081
        }
3082
      else if (fmt[i] == 'E')
3083
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3084
          if ((tem = regno_use_in (regno , XVECEXP (x, i, j))))
3085
            return tem;
3086
    }
3087
 
3088
  return NULL_RTX;
3089
}
3090
 
3091
/* Return a value indicating whether OP, an operand of a commutative
3092
   operation, is preferred as the first or second operand.  The higher
3093
   the value, the stronger the preference for being the first operand.
3094
   We use negative values to indicate a preference for the second operand
3095
   and positive values for the first operand.  */
3096
 
3097
int
3098
commutative_operand_precedence (rtx op)
3099
{
3100
  enum rtx_code code = GET_CODE (op);
3101
 
3102
  /* Constants always become the second operand.  Prefer "nice" constants.  */
3103
  if (code == CONST_INT)
3104
    return -8;
3105
  if (code == CONST_DOUBLE)
3106
    return -7;
3107
  if (code == CONST_FIXED)
3108
    return -7;
3109
  op = avoid_constant_pool_reference (op);
3110
  code = GET_CODE (op);
3111
 
3112
  switch (GET_RTX_CLASS (code))
3113
    {
3114
    case RTX_CONST_OBJ:
3115
      if (code == CONST_INT)
3116
        return -6;
3117
      if (code == CONST_DOUBLE)
3118
        return -5;
3119
      if (code == CONST_FIXED)
3120
        return -5;
3121
      return -4;
3122
 
3123
    case RTX_EXTRA:
3124
      /* SUBREGs of objects should come second.  */
3125
      if (code == SUBREG && OBJECT_P (SUBREG_REG (op)))
3126
        return -3;
3127
      return 0;
3128
 
3129
    case RTX_OBJ:
3130
      /* Complex expressions should be the first, so decrease priority
3131
         of objects.  Prefer pointer objects over non pointer objects.  */
3132
      if ((REG_P (op) && REG_POINTER (op))
3133
          || (MEM_P (op) && MEM_POINTER (op)))
3134
        return -1;
3135
      return -2;
3136
 
3137
    case RTX_COMM_ARITH:
3138
      /* Prefer operands that are themselves commutative to be first.
3139
         This helps to make things linear.  In particular,
3140
         (and (and (reg) (reg)) (not (reg))) is canonical.  */
3141
      return 4;
3142
 
3143
    case RTX_BIN_ARITH:
3144
      /* If only one operand is a binary expression, it will be the first
3145
         operand.  In particular,  (plus (minus (reg) (reg)) (neg (reg)))
3146
         is canonical, although it will usually be further simplified.  */
3147
      return 2;
3148
 
3149
    case RTX_UNARY:
3150
      /* Then prefer NEG and NOT.  */
3151
      if (code == NEG || code == NOT)
3152
        return 1;
3153
 
3154
    default:
3155
      return 0;
3156
    }
3157
}
3158
 
3159
/* Return 1 iff it is necessary to swap operands of commutative operation
3160
   in order to canonicalize expression.  */
3161
 
3162
bool
3163
swap_commutative_operands_p (rtx x, rtx y)
3164
{
3165
  return (commutative_operand_precedence (x)
3166
          < commutative_operand_precedence (y));
3167
}
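
/* Illustrative sketch (editor's addition, not part of rtlanal.c): this
   is how callers canonicalize the operand order of a commutative
   operation, assuming OP0 and OP1 are the two operands at hand:

       if (swap_commutative_operands_p (op0, op1))
         {
           rtx tem = op0;
           op0 = op1;
           op1 = tem;
         }

   so that, for example, a CONST_INT always ends up as the second
   operand.  */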
3168
 
3169
/* Return 1 if X is an autoincrement side effect and the register is
3170
   not the stack pointer.  */
3171
int
3172
auto_inc_p (const_rtx x)
3173
{
3174
  switch (GET_CODE (x))
3175
    {
3176
    case PRE_INC:
3177
    case POST_INC:
3178
    case PRE_DEC:
3179
    case POST_DEC:
3180
    case PRE_MODIFY:
3181
    case POST_MODIFY:
3182
      /* There are no REG_INC notes for SP.  */
3183
      if (XEXP (x, 0) != stack_pointer_rtx)
3184
        return 1;
3185
    default:
3186
      break;
3187
    }
3188
  return 0;
3189
}
3190
 
3191
/* Return nonzero if IN contains a piece of rtl that has the address LOC.  */
3192
int
3193
loc_mentioned_in_p (rtx *loc, const_rtx in)
3194
{
3195
  enum rtx_code code;
3196
  const char *fmt;
3197
  int i, j;
3198
 
3199
  if (!in)
3200
    return 0;
3201
 
3202
  code = GET_CODE (in);
3203
  fmt = GET_RTX_FORMAT (code);
3204
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3205
    {
3206
      if (fmt[i] == 'e')
3207
        {
3208
          if (loc == &XEXP (in, i) || loc_mentioned_in_p (loc, XEXP (in, i)))
3209
            return 1;
3210
        }
3211
      else if (fmt[i] == 'E')
3212
        for (j = XVECLEN (in, i) - 1; j >= 0; j--)
3213
          if (loc == &XVECEXP (in, i, j)
3214
              || loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
3215
            return 1;
3216
    }
3217
  return 0;
3218
}
3219
 
3220
/* Helper function for subreg_lsb.  Given a subreg's OUTER_MODE, INNER_MODE,
3221
   and SUBREG_BYTE, return the bit offset where the subreg begins
3222
   (counting from the least significant bit of the operand).  */
3223
 
3224
unsigned int
3225
subreg_lsb_1 (enum machine_mode outer_mode,
3226
              enum machine_mode inner_mode,
3227
              unsigned int subreg_byte)
3228
{
3229
  unsigned int bitpos;
3230
  unsigned int byte;
3231
  unsigned int word;
3232
 
3233
  /* A paradoxical subreg begins at bit position 0.  */
3234
  if (GET_MODE_PRECISION (outer_mode) > GET_MODE_PRECISION (inner_mode))
3235
    return 0;
3236
 
3237
  if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
3238
    /* If the subreg crosses a word boundary ensure that
3239
       it also begins and ends on a word boundary.  */
3240
    gcc_assert (!((subreg_byte % UNITS_PER_WORD
3241
                  + GET_MODE_SIZE (outer_mode)) > UNITS_PER_WORD
3242
                  && (subreg_byte % UNITS_PER_WORD
3243
                      || GET_MODE_SIZE (outer_mode) % UNITS_PER_WORD)));
3244
 
3245
  if (WORDS_BIG_ENDIAN)
3246
    word = (GET_MODE_SIZE (inner_mode)
3247
            - (subreg_byte + GET_MODE_SIZE (outer_mode))) / UNITS_PER_WORD;
3248
  else
3249
    word = subreg_byte / UNITS_PER_WORD;
3250
  bitpos = word * BITS_PER_WORD;
3251
 
3252
  if (BYTES_BIG_ENDIAN)
3253
    byte = (GET_MODE_SIZE (inner_mode)
3254
            - (subreg_byte + GET_MODE_SIZE (outer_mode))) % UNITS_PER_WORD;
3255
  else
3256
    byte = subreg_byte % UNITS_PER_WORD;
3257
  bitpos += byte * BITS_PER_UNIT;
3258
 
3259
  return bitpos;
3260
}
3261
 
3262
/* Given a subreg X, return the bit offset where the subreg begins
3263
   (counting from the least significant bit of the reg).  */
3264
 
3265
unsigned int
3266
subreg_lsb (const_rtx x)
3267
{
3268
  return subreg_lsb_1 (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
3269
                       SUBREG_BYTE (x));
3270
}
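
/* Worked example (editor's addition, not part of rtlanal.c): on a
   little-endian target with UNITS_PER_WORD == 4 and BITS_PER_WORD == 32,

       (subreg:QI (reg:SI x) 2)

   gives word = 2 / 4 = 0 and byte = 2 % 4 = 2, so subreg_lsb returns
   2 * BITS_PER_UNIT = 16: the QImode value occupies bits 16..23 of the
   SImode register.  With BYTES_BIG_ENDIAN and WORDS_BIG_ENDIAN both
   set, byte becomes (4 - (2 + 1)) % 4 = 1 and the result is 8.  */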
3271
 
3272
/* Fill in information about a subreg of a hard register.
3273
   xregno - A regno of an inner hard subreg_reg (or what will become one).
3274
   xmode  - The mode of xregno.
3275
   offset - The byte offset.
3276
   ymode  - The mode of a top level SUBREG (or what may become one).
3277
   info   - Pointer to structure to fill in.  */
3278
void
3279
subreg_get_info (unsigned int xregno, enum machine_mode xmode,
3280
                 unsigned int offset, enum machine_mode ymode,
3281
                 struct subreg_info *info)
3282
{
3283
  int nregs_xmode, nregs_ymode;
3284
  int mode_multiple, nregs_multiple;
3285
  int offset_adj, y_offset, y_offset_adj;
3286
  int regsize_xmode, regsize_ymode;
3287
  bool rknown;
3288
 
3289
  gcc_assert (xregno < FIRST_PSEUDO_REGISTER);
3290
 
3291
  rknown = false;
3292
 
3293
  /* If there are holes in a non-scalar mode in registers, we expect
3294
     that it is made up of its units concatenated together.  */
3295
  if (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode))
3296
    {
3297
      enum machine_mode xmode_unit;
3298
 
3299
      nregs_xmode = HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode);
3300
      if (GET_MODE_INNER (xmode) == VOIDmode)
3301
        xmode_unit = xmode;
3302
      else
3303
        xmode_unit = GET_MODE_INNER (xmode);
3304
      gcc_assert (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode_unit));
3305
      gcc_assert (nregs_xmode
3306
                  == (GET_MODE_NUNITS (xmode)
3307
                      * HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode_unit)));
3308
      gcc_assert (hard_regno_nregs[xregno][xmode]
3309
                  == (hard_regno_nregs[xregno][xmode_unit]
3310
                      * GET_MODE_NUNITS (xmode)));
3311
 
3312
      /* You can only ask for a SUBREG of a value with holes in the middle
3313
         if you don't cross the holes.  (Such a SUBREG should be done by
3314
         picking a different register class, or doing it in memory if
3315
         necessary.)  An example of a value with holes is XCmode on 32-bit
3316
         x86 with -m128bit-long-double; it's represented in 6 32-bit registers,
3317
         3 for each part, but in memory it's two 128-bit parts.
3318
         Padding is assumed to be at the end (not necessarily the 'high part')
3319
         of each unit.  */
3320
      if ((offset / GET_MODE_SIZE (xmode_unit) + 1
3321
           < GET_MODE_NUNITS (xmode))
3322
          && (offset / GET_MODE_SIZE (xmode_unit)
3323
              != ((offset + GET_MODE_SIZE (ymode) - 1)
3324
                  / GET_MODE_SIZE (xmode_unit))))
3325
        {
3326
          info->representable_p = false;
3327
          rknown = true;
3328
        }
3329
    }
3330
  else
3331
    nregs_xmode = hard_regno_nregs[xregno][xmode];
3332
 
3333
  nregs_ymode = hard_regno_nregs[xregno][ymode];
3334
 
3335
  /* Paradoxical subregs are otherwise valid.  */
3336
  if (!rknown
3337
      && offset == 0
3338
      && GET_MODE_PRECISION (ymode) > GET_MODE_PRECISION (xmode))
3339
    {
3340
      info->representable_p = true;
3341
      /* If this is a big endian paradoxical subreg, which uses more
3342
         actual hard registers than the original register, we must
3343
         return a negative offset so that we find the proper highpart
3344
         of the register.  */
3345
      if (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
3346
          ? REG_WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN)
3347
        info->offset = nregs_xmode - nregs_ymode;
3348
      else
3349
        info->offset = 0;
3350
      info->nregs = nregs_ymode;
3351
      return;
3352
    }
3353
 
3354
  /* If registers store different numbers of bits in the different
3355
     modes, we cannot generally form this subreg.  */
3356
  if (!HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode)
3357
      && !HARD_REGNO_NREGS_HAS_PADDING (xregno, ymode)
3358
      && (GET_MODE_SIZE (xmode) % nregs_xmode) == 0
3359
      && (GET_MODE_SIZE (ymode) % nregs_ymode) == 0)
3360
    {
3361
      regsize_xmode = GET_MODE_SIZE (xmode) / nregs_xmode;
3362
      regsize_ymode = GET_MODE_SIZE (ymode) / nregs_ymode;
3363
      if (!rknown && regsize_xmode > regsize_ymode && nregs_ymode > 1)
3364
        {
3365
          info->representable_p = false;
3366
          info->nregs
3367
            = (GET_MODE_SIZE (ymode) + regsize_xmode - 1) / regsize_xmode;
3368
          info->offset = offset / regsize_xmode;
3369
          return;
3370
        }
3371
      if (!rknown && regsize_ymode > regsize_xmode && nregs_xmode > 1)
3372
        {
3373
          info->representable_p = false;
3374
          info->nregs
3375
            = (GET_MODE_SIZE (ymode) + regsize_xmode - 1) / regsize_xmode;
3376
          info->offset = offset / regsize_xmode;
3377
          return;
3378
        }
3379
    }
3380
 
3381
  /* Lowpart subregs are otherwise valid.  */
3382
  if (!rknown && offset == subreg_lowpart_offset (ymode, xmode))
3383
    {
3384
      info->representable_p = true;
3385
      rknown = true;
3386
 
3387
      if (offset == 0 || nregs_xmode == nregs_ymode)
3388
        {
3389
          info->offset = 0;
3390
          info->nregs = nregs_ymode;
3391
          return;
3392
        }
3393
    }
3394
 
3395
  /* This should always pass, otherwise we don't know how to verify
3396
     the constraint.  These conditions may be relaxed but
3397
     subreg_regno_offset would need to be redesigned.  */
3398
  gcc_assert ((GET_MODE_SIZE (xmode) % GET_MODE_SIZE (ymode)) == 0);
3399
  gcc_assert ((nregs_xmode % nregs_ymode) == 0);
3400
 
3401
  if (WORDS_BIG_ENDIAN != REG_WORDS_BIG_ENDIAN
3402
      && GET_MODE_SIZE (xmode) > UNITS_PER_WORD)
3403
    {
3404
      HOST_WIDE_INT xsize = GET_MODE_SIZE (xmode);
3405
      HOST_WIDE_INT ysize = GET_MODE_SIZE (ymode);
3406
      HOST_WIDE_INT off_low = offset & (ysize - 1);
3407
      HOST_WIDE_INT off_high = offset & ~(ysize - 1);
3408
      offset = (xsize - ysize - off_high) | off_low;
3409
    }
3410
  /* The XMODE value can be seen as a vector of NREGS_XMODE
3411
     values.  The subreg must represent a lowpart of a given field.
3412
     Compute what field it is.  */
3413
  offset_adj = offset;
3414
  offset_adj -= subreg_lowpart_offset (ymode,
3415
                                       mode_for_size (GET_MODE_BITSIZE (xmode)
3416
                                                      / nregs_xmode,
3417
                                                      MODE_INT, 0));
3418
 
3419
  /* Size of ymode must not be greater than the size of xmode.  */
3420
  mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
3421
  gcc_assert (mode_multiple != 0);
3422
 
3423
  y_offset = offset / GET_MODE_SIZE (ymode);
3424
  y_offset_adj = offset_adj / GET_MODE_SIZE (ymode);
3425
  nregs_multiple = nregs_xmode / nregs_ymode;
3426
 
3427
  gcc_assert ((offset_adj % GET_MODE_SIZE (ymode)) == 0);
3428
  gcc_assert ((mode_multiple % nregs_multiple) == 0);
3429
 
3430
  if (!rknown)
3431
    {
3432
      info->representable_p = (!(y_offset_adj % (mode_multiple / nregs_multiple)));
3433
      rknown = true;
3434
    }
3435
  info->offset = (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
3436
  info->nregs = nregs_ymode;
3437
}
3438
 
3439
/* This function returns the regno offset of a subreg expression.
3440
   xregno - A regno of an inner hard subreg_reg (or what will become one).
3441
   xmode  - The mode of xregno.
3442
   offset - The byte offset.
3443
   ymode  - The mode of a top level SUBREG (or what may become one).
3444
   RETURN - The regno offset which would be used.  */
3445
unsigned int
3446
subreg_regno_offset (unsigned int xregno, enum machine_mode xmode,
3447
                     unsigned int offset, enum machine_mode ymode)
3448
{
3449
  struct subreg_info info;
3450
  subreg_get_info (xregno, xmode, offset, ymode, &info);
3451
  return info.offset;
3452
}
3453
 
3454
/* This function returns true when the offset is representable via
3455
   subreg_offset in the given regno.
3456
   xregno - A regno of an inner hard subreg_reg (or what will become one).
3457
   xmode  - The mode of xregno.
3458
   offset - The byte offset.
3459
   ymode  - The mode of a top level SUBREG (or what may become one).
3460
   RETURN - Whether the offset is representable.  */
3461
bool
3462
subreg_offset_representable_p (unsigned int xregno, enum machine_mode xmode,
3463
                               unsigned int offset, enum machine_mode ymode)
3464
{
3465
  struct subreg_info info;
3466
  subreg_get_info (xregno, xmode, offset, ymode, &info);
3467
  return info.representable_p;
3468
}
3469
 
3470
/* Return the number of a YMODE register to which
3471
 
3472
       (subreg:YMODE (reg:XMODE XREGNO) OFFSET)
3473
 
3474
   can be simplified.  Return -1 if the subreg can't be simplified.
3475
 
3476
   XREGNO is a hard register number.  */
3477
 
3478
int
3479
simplify_subreg_regno (unsigned int xregno, enum machine_mode xmode,
3480
                       unsigned int offset, enum machine_mode ymode)
3481
{
3482
  struct subreg_info info;
3483
  unsigned int yregno;
3484
 
3485
#ifdef CANNOT_CHANGE_MODE_CLASS
3486
  /* Give the backend a chance to disallow the mode change.  */
3487
  if (GET_MODE_CLASS (xmode) != MODE_COMPLEX_INT
3488
      && GET_MODE_CLASS (xmode) != MODE_COMPLEX_FLOAT
3489
      && REG_CANNOT_CHANGE_MODE_P (xregno, xmode, ymode))
3490
    return -1;
3491
#endif
3492
 
3493
  /* We shouldn't simplify stack-related registers.  */
3494
  if ((!reload_completed || frame_pointer_needed)
3495
      && xregno == FRAME_POINTER_REGNUM)
3496
    return -1;
3497
 
3498
  if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3499
      && xregno == ARG_POINTER_REGNUM)
3500
    return -1;
3501
 
3502
  if (xregno == STACK_POINTER_REGNUM)
3503
    return -1;
3504
 
3505
  /* Try to get the register offset.  */
3506
  subreg_get_info (xregno, xmode, offset, ymode, &info);
3507
  if (!info.representable_p)
3508
    return -1;
3509
 
3510
  /* Make sure that the offsetted register value is in range.  */
3511
  yregno = xregno + info.offset;
3512
  if (!HARD_REGISTER_NUM_P (yregno))
3513
    return -1;
3514
 
3515
  /* See whether (reg:YMODE YREGNO) is valid.
3516
 
3517
     ??? We allow invalid registers if (reg:XMODE XREGNO) is also invalid.
3518
     This is a kludge to work around how complex FP arguments are passed
3519
     on IA-64 and should be fixed.  See PR target/49226.  */
3520
  if (!HARD_REGNO_MODE_OK (yregno, ymode)
3521
      && HARD_REGNO_MODE_OK (xregno, xmode))
3522
    return -1;
3523
 
3524
  return (int) yregno;
3525
}
3526
 
3527
/* Return the final regno that a subreg expression refers to.  */
3528
unsigned int
3529
subreg_regno (const_rtx x)
3530
{
3531
  unsigned int ret;
3532
  rtx subreg = SUBREG_REG (x);
3533
  int regno = REGNO (subreg);
3534
 
3535
  ret = regno + subreg_regno_offset (regno,
3536
                                     GET_MODE (subreg),
3537
                                     SUBREG_BYTE (x),
3538
                                     GET_MODE (x));
3539
  return ret;
3540
 
3541
}
3542
 
3543
/* Return the number of registers that a subreg expression refers
3544
   to.  */
3545
unsigned int
3546
subreg_nregs (const_rtx x)
3547
{
3548
  return subreg_nregs_with_regno (REGNO (SUBREG_REG (x)), x);
3549
}
3550
 
3551
/* Return the number of registers that a subreg REG with REGNO
3552
   expression refers to.  This is a copy of rtlanal.c:subreg_nregs,
3553
   changed so that the regno can be passed in.  */
3554
 
3555
unsigned int
3556
subreg_nregs_with_regno (unsigned int regno, const_rtx x)
3557
{
3558
  struct subreg_info info;
3559
  rtx subreg = SUBREG_REG (x);
3560
 
3561
  subreg_get_info (regno, GET_MODE (subreg), SUBREG_BYTE (x), GET_MODE (x),
3562
                   &info);
3563
  return info.nregs;
3564
}
3565
 
3566
 
3567
struct parms_set_data
3568
{
3569
  int nregs;
3570
  HARD_REG_SET regs;
3571
};
3572
 
3573
/* Helper function for noticing stores to parameter registers.  */
3574
static void
3575
parms_set (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
3576
{
3577
  struct parms_set_data *const d = (struct parms_set_data *) data;
3578
  if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3579
      && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
3580
    {
3581
      CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
3582
      d->nregs--;
3583
    }
3584
}
3585
 
3586
/* Look backward for first parameter to be loaded.
3587
   Note that loads of all parameters will not necessarily be
3588
   found if CSE has eliminated some of them (e.g., an argument
3589
   to the outer function is passed down as a parameter).
3590
   Do not skip BOUNDARY.  */
3591
rtx
3592
find_first_parameter_load (rtx call_insn, rtx boundary)
3593
{
3594
  struct parms_set_data parm;
3595
  rtx p, before, first_set;
3596
 
3597
  /* Since different machines initialize their parameter registers
3598
     in different orders, assume nothing.  Collect the set of all
3599
     parameter registers.  */
3600
  CLEAR_HARD_REG_SET (parm.regs);
3601
  parm.nregs = 0;
3602
  for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
3603
    if (GET_CODE (XEXP (p, 0)) == USE
3604
        && REG_P (XEXP (XEXP (p, 0), 0)))
3605
      {
3606
        gcc_assert (REGNO (XEXP (XEXP (p, 0), 0)) < FIRST_PSEUDO_REGISTER);
3607
 
3608
        /* We only care about registers which can hold function
3609
           arguments.  */
3610
        if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
3611
          continue;
3612
 
3613
        SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
3614
        parm.nregs++;
3615
      }
3616
  before = call_insn;
3617
  first_set = call_insn;
3618
 
3619
  /* Search backward for the first set of a register in this set.  */
3620
  while (parm.nregs && before != boundary)
3621
    {
3622
      before = PREV_INSN (before);
3623
 
3624
      /* It is possible that some loads got CSEed from one call to
3625
         another.  Stop in that case.  */
3626
      if (CALL_P (before))
3627
        break;
3628
 
3629
      /* Our caller must either ensure that we will find all sets
3630
         (in case the code has not been optimized yet), or take care
3631
         of possible labels by setting BOUNDARY to the preceding
3632
         CODE_LABEL.  */
3633
      if (LABEL_P (before))
3634
        {
3635
          gcc_assert (before == boundary);
3636
          break;
3637
        }
3638
 
3639
      if (INSN_P (before))
3640
        {
3641
          int nregs_old = parm.nregs;
3642
          note_stores (PATTERN (before), parms_set, &parm);
3643
          /* If we found something that did not set a parameter reg,
3644
             we're done.  Do not keep going, as that might result
3645
             in hoisting an insn before the setting of a pseudo
3646
             that is used by the hoisted insn. */
3647
          if (nregs_old != parm.nregs)
3648
            first_set = before;
3649
          else
3650
            break;
3651
        }
3652
    }
3653
  return first_set;
3654
}
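/* Illustrative, self-contained sketch of the backward scan above, on a toy
   model where insn I is reduced to the single hard register it sets
   (regs_set[I], or -1 if it sets no parameter register).  The names used
   here are invented for the example and are not part of rtlanal.c.  */
#if 0
#include <stdint.h>

static int
toy_first_parameter_store (const int *regs_set, int call_index,
                           uint64_t parm_mask)
{
  int first = call_index;              /* fall back to the call itself */
  for (int i = call_index - 1; i >= 0 && parm_mask != 0; i--)
    {
      if (regs_set[i] >= 0
          && (parm_mask & ((uint64_t) 1 << regs_set[i])) != 0)
        {
          /* A parameter register is set here; remember the earliest such
             insn and stop waiting for that register.  */
          parm_mask &= ~((uint64_t) 1 << regs_set[i]);
          first = i;
        }
      else
        /* Something unrelated to parameter setup: stop, as the real code
           does, rather than walking past it.  */
        break;
    }
  return first;
}
#endif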
3655
 
3656
/* Return true if we should avoid inserting code between INSN and preceding
3657
   call instruction.  */
3658
 
3659
bool
3660
keep_with_call_p (const_rtx insn)
3661
{
3662
  rtx set;
3663
 
3664
  if (INSN_P (insn) && (set = single_set (insn)) != NULL)
3665
    {
3666
      if (REG_P (SET_DEST (set))
3667
          && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
3668
          && fixed_regs[REGNO (SET_DEST (set))]
3669
          && general_operand (SET_SRC (set), VOIDmode))
3670
        return true;
3671
      if (REG_P (SET_SRC (set))
3672
          && targetm.calls.function_value_regno_p (REGNO (SET_SRC (set)))
3673
          && REG_P (SET_DEST (set))
3674
          && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
3675
        return true;
3676
      /* There may be a stack pop just after the call and before the store
3677
         of the return register.  Search for the actual store when deciding
3678
         if we can break or not.  */
3679
      if (SET_DEST (set) == stack_pointer_rtx)
3680
        {
3681
          /* This CONST_CAST is okay because next_nonnote_insn just
3682
             returns its argument and we assign it to a const_rtx
3683
             variable.  */
3684
          const_rtx i2 = next_nonnote_insn (CONST_CAST_RTX(insn));
3685
          if (i2 && keep_with_call_p (i2))
3686
            return true;
3687
        }
3688
    }
3689
  return false;
3690
}
3691
 
3692
/* Return true if LABEL is a target of JUMP_INSN.  This applies only
3693
   to non-complex jumps.  That is, direct unconditional, conditional,
3694
   and tablejumps, but not computed jumps or returns.  It also does
3695
   not apply to the fallthru case of a conditional jump.  */
3696
 
3697
bool
3698
label_is_jump_target_p (const_rtx label, const_rtx jump_insn)
3699
{
3700
  rtx tmp = JUMP_LABEL (jump_insn);
3701
 
3702
  if (label == tmp)
3703
    return true;
3704
 
3705
  if (tablejump_p (jump_insn, NULL, &tmp))
3706
    {
3707
      rtvec vec = XVEC (PATTERN (tmp),
3708
                        GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC);
3709
      int i, veclen = GET_NUM_ELEM (vec);
3710
 
3711
      for (i = 0; i < veclen; ++i)
3712
        if (XEXP (RTVEC_ELT (vec, i), 0) == label)
3713
          return true;
3714
    }
3715
 
3716
  if (find_reg_note (jump_insn, REG_LABEL_TARGET, label))
3717
    return true;
3718
 
3719
  return false;
3720
}
3721
 
3722
 
3723
/* Return an estimate of the cost of computing rtx X.
3724
   One use is in cse, to decide which expression to keep in the hash table.
3725
   Another is in rtl generation, to pick the cheapest way to multiply.
3726
   Other uses like the latter are expected in the future.
3727
 
3728
   X appears as operand OPNO in an expression with code OUTER_CODE.
3729
   SPEED specifies whether costs optimized for speed or size should
3730
   be returned.  */
3731
 
3732
int
3733
rtx_cost (rtx x, enum rtx_code outer_code, int opno, bool speed)
3734
{
3735
  int i, j;
3736
  enum rtx_code code;
3737
  const char *fmt;
3738
  int total;
3739
 
3740
  if (x == 0)
3741
    return 0;
3742
 
3743
  /* Compute the default costs of certain things.
3744
     Note that targetm.rtx_costs can override the defaults.  */
3745
 
3746
  code = GET_CODE (x);
3747
  switch (code)
3748
    {
3749
    case MULT:
3750
      total = COSTS_N_INSNS (5);
3751
      break;
3752
    case DIV:
3753
    case UDIV:
3754
    case MOD:
3755
    case UMOD:
3756
      total = COSTS_N_INSNS (7);
3757
      break;
3758
    case USE:
3759
      /* Used in combine.c as a marker.  */
3760
      total = 0;
3761
      break;
3762
    default:
3763
      total = COSTS_N_INSNS (1);
3764
    }
3765
 
3766
  switch (code)
3767
    {
3768
    case REG:
3769
      return 0;
3770
 
3771
    case SUBREG:
3772
      total = 0;
3773
      /* If we can't tie these modes, make this expensive.  The larger
3774
         the mode, the more expensive it is.  */
3775
      if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
3776
        return COSTS_N_INSNS (2
3777
                              + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
3778
      break;
3779
 
3780
    default:
3781
      if (targetm.rtx_costs (x, code, outer_code, opno, &total, speed))
3782
        return total;
3783
      break;
3784
    }
3785
 
3786
  /* Sum the costs of the sub-rtx's, plus cost of this operation,
3787
     which is already in total.  */
3788
 
3789
  fmt = GET_RTX_FORMAT (code);
3790
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3791
    if (fmt[i] == 'e')
3792
      total += rtx_cost (XEXP (x, i), code, i, speed);
3793
    else if (fmt[i] == 'E')
3794
      for (j = 0; j < XVECLEN (x, i); j++)
3795
        total += rtx_cost (XVECEXP (x, i, j), code, i, speed);
3796
 
3797
  return total;
3798
}
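/* A worked example of the recursion above, assuming the target hook declines
   to override the defaults: for (plus:SI (mult:SI (reg a) (reg b)) (reg c))
   the PLUS contributes COSTS_N_INSNS (1), the MULT COSTS_N_INSNS (5), and
   each REG contributes 0, giving COSTS_N_INSNS (6) in total.  The macro
   below is only a stand-in for COSTS_N_INSNS for the purposes of the
   sketch.  */
#if 0
#include <stdio.h>

#define EXAMPLE_COSTS_N_INSNS(n) ((n) * 4)   /* stand-in for COSTS_N_INSNS */

int
main (void)
{
  int mult_cost = EXAMPLE_COSTS_N_INSNS (5);  /* default for MULT */
  int plus_cost = EXAMPLE_COSTS_N_INSNS (1);  /* generic default */
  int reg_cost = 0;                           /* REG operands are free */
  printf ("%d\n", plus_cost + mult_cost + 3 * reg_cost);  /* prints 24 */
  return 0;
}
#endif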
3799
 
3800
/* Fill in the structure C with information about both speed and size rtx
3801
   costs for X, which is operand OPNO in an expression with code OUTER.  */
3802
 
3803
void
3804
get_full_rtx_cost (rtx x, enum rtx_code outer, int opno,
3805
                   struct full_rtx_costs *c)
3806
{
3807
  c->speed = rtx_cost (x, outer, opno, true);
3808
  c->size = rtx_cost (x, outer, opno, false);
3809
}
3810
 
3811
 
3812
/* Return cost of address expression X.
3813
   Expect that X is a properly formed address reference.
3814
 
3815
   The SPEED parameter specifies whether costs optimized for speed or size should
3816
   be returned.  */
3817
 
3818
int
3819
address_cost (rtx x, enum machine_mode mode, addr_space_t as, bool speed)
3820
{
3821
  /* We may be asked for the cost of various unusual addresses, such as the
3823
     operands of a push instruction.  It is not worthwhile to complicate the
3824
     target hook to handle such cases.  */
3824
 
3825
  if (!memory_address_addr_space_p (mode, x, as))
3826
    return 1000;
3827
 
3828
  return targetm.address_cost (x, speed);
3829
}
3830
 
3831
/* If the target doesn't override, compute the cost as with arithmetic.  */
3832
 
3833
int
3834
default_address_cost (rtx x, bool speed)
3835
{
3836
  return rtx_cost (x, MEM, 0, speed);
3837
}
3838
 
3839
 
3840
unsigned HOST_WIDE_INT
3841
nonzero_bits (const_rtx x, enum machine_mode mode)
3842
{
3843
  return cached_nonzero_bits (x, mode, NULL_RTX, VOIDmode, 0);
3844
}
3845
 
3846
unsigned int
3847
num_sign_bit_copies (const_rtx x, enum machine_mode mode)
3848
{
3849
  return cached_num_sign_bit_copies (x, mode, NULL_RTX, VOIDmode, 0);
3850
}
3851
 
3852
/* The function cached_nonzero_bits is a wrapper around nonzero_bits1.
3853
   It avoids exponential behavior in nonzero_bits1 when X has
3854
   identical subexpressions on the first or the second level.  */
3855
 
3856
static unsigned HOST_WIDE_INT
3857
cached_nonzero_bits (const_rtx x, enum machine_mode mode, const_rtx known_x,
3858
                     enum machine_mode known_mode,
3859
                     unsigned HOST_WIDE_INT known_ret)
3860
{
3861
  if (x == known_x && mode == known_mode)
3862
    return known_ret;
3863
 
3864
  /* Try to find identical subexpressions.  If found call
3865
     nonzero_bits1 on X with the subexpressions as KNOWN_X and the
3866
     precomputed value for the subexpression as KNOWN_RET.  */
3867
 
3868
  if (ARITHMETIC_P (x))
3869
    {
3870
      rtx x0 = XEXP (x, 0);
3871
      rtx x1 = XEXP (x, 1);
3872
 
3873
      /* Check the first level.  */
3874
      if (x0 == x1)
3875
        return nonzero_bits1 (x, mode, x0, mode,
3876
                              cached_nonzero_bits (x0, mode, known_x,
3877
                                                   known_mode, known_ret));
3878
 
3879
      /* Check the second level.  */
3880
      if (ARITHMETIC_P (x0)
3881
          && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
3882
        return nonzero_bits1 (x, mode, x1, mode,
3883
                              cached_nonzero_bits (x1, mode, known_x,
3884
                                                   known_mode, known_ret));
3885
 
3886
      if (ARITHMETIC_P (x1)
3887
          && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
3888
        return nonzero_bits1 (x, mode, x0, mode,
3889
                              cached_nonzero_bits (x0, mode, known_x,
3890
                                                   known_mode, known_ret));
3891
    }
3892
 
3893
  return nonzero_bits1 (x, mode, known_x, known_mode, known_ret);
3894
}
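/* Illustrative, self-contained sketch of the caching trick above on a toy
   tree walk: when a node's two children are the same object, the child is
   evaluated once and the result is threaded down as the (KNOWN, KNOWN_VAL)
   pair instead of being recomputed in both branches.  All names here are
   invented for the example.  */
#if 0
struct toy_node { const struct toy_node *left, *right; unsigned leaf_bits; };

static unsigned toy_bits_1 (const struct toy_node *, const struct toy_node *,
                            unsigned);

static unsigned
toy_bits (const struct toy_node *n, const struct toy_node *known,
          unsigned known_val)
{
  if (n == known)
    return known_val;
  if (n->left != 0 && n->left == n->right)
    /* Evaluate the shared child once, then let toy_bits_1 reuse it.  */
    return toy_bits_1 (n, n->left, toy_bits (n->left, known, known_val));
  return toy_bits_1 (n, known, known_val);
}

static unsigned
toy_bits_1 (const struct toy_node *n, const struct toy_node *known,
            unsigned known_val)
{
  if (n->left == 0)
    return n->leaf_bits;
  return toy_bits (n->left, known, known_val)
         | toy_bits (n->right, known, known_val);
}
#endif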
3895
 
3896
/* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
3897
   We don't let nonzero_bits recur into num_sign_bit_copies, because that
3898
   is less useful.  We can't allow both, because that results in exponential
3899
   run time recursion.  There is a nullstone testcase that triggered
3900
   this.  This macro avoids accidental uses of num_sign_bit_copies.  */
3901
#define cached_num_sign_bit_copies sorry_i_am_preventing_exponential_behavior
3902
 
3903
/* Given an expression, X, compute which bits in X can be nonzero.
3904
   We don't care about bits outside of those defined in MODE.
3905
 
3906
   For most X this is simply GET_MODE_MASK (MODE), but if X is
3907
   an arithmetic operation, we can do better.  */
3908
 
3909
static unsigned HOST_WIDE_INT
3910
nonzero_bits1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
3911
               enum machine_mode known_mode,
3912
               unsigned HOST_WIDE_INT known_ret)
3913
{
3914
  unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
3915
  unsigned HOST_WIDE_INT inner_nz;
3916
  enum rtx_code code;
3917
  enum machine_mode inner_mode;
3918
  unsigned int mode_width = GET_MODE_PRECISION (mode);
3919
 
3920
  /* For floating-point and vector values, assume all bits are needed.  */
3921
  if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode)
3922
      || VECTOR_MODE_P (GET_MODE (x)) || VECTOR_MODE_P (mode))
3923
    return nonzero;
3924
 
3925
  /* If X is wider than MODE, use its mode instead.  */
3926
  if (GET_MODE_PRECISION (GET_MODE (x)) > mode_width)
3927
    {
3928
      mode = GET_MODE (x);
3929
      nonzero = GET_MODE_MASK (mode);
3930
      mode_width = GET_MODE_PRECISION (mode);
3931
    }
3932
 
3933
  if (mode_width > HOST_BITS_PER_WIDE_INT)
3934
    /* Our only callers in this case look for single bit values.  So
3935
       just return the mode mask.  Those tests will then be false.  */
3936
    return nonzero;
3937
 
3938
#ifndef WORD_REGISTER_OPERATIONS
3939
  /* If MODE is wider than X, but both are a single word for both the host
3940
     and target machines, we can compute this from which bits of the
3941
     object might be nonzero in its own mode, taking into account the fact
3942
     that on many CISC machines, accessing an object in a wider mode
3943
     causes the high-order bits to become undefined.  So they are
3944
     not known to be zero.  */
3945
 
3946
  if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
3947
      && GET_MODE_PRECISION (GET_MODE (x)) <= BITS_PER_WORD
3948
      && GET_MODE_PRECISION (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
3949
      && GET_MODE_PRECISION (mode) > GET_MODE_PRECISION (GET_MODE (x)))
3950
    {
3951
      nonzero &= cached_nonzero_bits (x, GET_MODE (x),
3952
                                      known_x, known_mode, known_ret);
3953
      nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x));
3954
      return nonzero;
3955
    }
3956
#endif
3957
 
3958
  code = GET_CODE (x);
3959
  switch (code)
3960
    {
3961
    case REG:
3962
#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
3963
      /* If pointers extend unsigned and this is a pointer in Pmode, say that
3964
         all the bits above ptr_mode are known to be zero.  */
3965
      /* As we do not know which address space the pointer is referring to,
3966
         we can do this only if the target does not support different pointer
3967
         or address modes depending on the address space.  */
3968
      if (target_default_pointer_address_modes_p ()
3969
          && POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
3970
          && REG_POINTER (x))
3971
        nonzero &= GET_MODE_MASK (ptr_mode);
3972
#endif
3973
 
3974
      /* Include declared information about alignment of pointers.  */
3975
      /* ??? We don't properly preserve REG_POINTER changes across
3976
         pointer-to-integer casts, so we can't trust it except for
3977
         things that we know must be pointers.  See execute/960116-1.c.  */
3978
      if ((x == stack_pointer_rtx
3979
           || x == frame_pointer_rtx
3980
           || x == arg_pointer_rtx)
3981
          && REGNO_POINTER_ALIGN (REGNO (x)))
3982
        {
3983
          unsigned HOST_WIDE_INT alignment
3984
            = REGNO_POINTER_ALIGN (REGNO (x)) / BITS_PER_UNIT;
3985
 
3986
#ifdef PUSH_ROUNDING
3987
          /* If PUSH_ROUNDING is defined, it is possible for the
3988
             stack to be momentarily aligned only to that amount,
3989
             so we pick the least alignment.  */
3990
          if (x == stack_pointer_rtx && PUSH_ARGS)
3991
            alignment = MIN ((unsigned HOST_WIDE_INT) PUSH_ROUNDING (1),
3992
                             alignment);
3993
#endif
3994
 
3995
          nonzero &= ~(alignment - 1);
3996
        }
3997
 
3998
      {
3999
        unsigned HOST_WIDE_INT nonzero_for_hook = nonzero;
4000
        rtx new_rtx = rtl_hooks.reg_nonzero_bits (x, mode, known_x,
4001
                                              known_mode, known_ret,
4002
                                              &nonzero_for_hook);
4003
 
4004
        if (new_rtx)
4005
          nonzero_for_hook &= cached_nonzero_bits (new_rtx, mode, known_x,
4006
                                                   known_mode, known_ret);
4007
 
4008
        return nonzero_for_hook;
4009
      }
4010
 
4011
    case CONST_INT:
4012
#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
4013
      /* If X is negative in MODE, sign-extend the value.  */
4014
      if (INTVAL (x) > 0
4015
          && mode_width < BITS_PER_WORD
4016
          && (UINTVAL (x) & ((unsigned HOST_WIDE_INT) 1 << (mode_width - 1)))
4017
             != 0)
4018
        return UINTVAL (x) | ((unsigned HOST_WIDE_INT) (-1) << mode_width);
4019
#endif
4020
 
4021
      return UINTVAL (x);
4022
 
4023
    case MEM:
4024
#ifdef LOAD_EXTEND_OP
4025
      /* In many, if not most, RISC machines, reading a byte from memory
4026
         zeros the rest of the register.  Noticing that fact saves a lot
4027
         of extra zero-extends.  */
4028
      if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
4029
        nonzero &= GET_MODE_MASK (GET_MODE (x));
4030
#endif
4031
      break;
4032
 
4033
    case EQ:  case NE:
4034
    case UNEQ:  case LTGT:
4035
    case GT:  case GTU:  case UNGT:
4036
    case LT:  case LTU:  case UNLT:
4037
    case GE:  case GEU:  case UNGE:
4038
    case LE:  case LEU:  case UNLE:
4039
    case UNORDERED: case ORDERED:
4040
      /* If this produces an integer result, we know which bits are set.
4041
         Code here used to clear bits outside the mode of X, but that is
4042
         now done above.  */
4043
      /* Mind that MODE is the mode the caller wants to look at this
4044
         operation in, and not the actual operation mode.  We can wind
4045
         up with (subreg:DI (gt:V4HI x y)), and we don't have anything
4046
         that describes the results of a vector compare.  */
4047
      if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
4048
          && mode_width <= HOST_BITS_PER_WIDE_INT)
4049
        nonzero = STORE_FLAG_VALUE;
4050
      break;
4051
 
4052
    case NEG:
4053
#if 0
4054
      /* Disabled to avoid exponential mutual recursion between nonzero_bits
4055
         and num_sign_bit_copies.  */
4056
      if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
4057
          == GET_MODE_PRECISION (GET_MODE (x)))
4058
        nonzero = 1;
4059
#endif
4060
 
4061
      if (GET_MODE_PRECISION (GET_MODE (x)) < mode_width)
4062
        nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x)));
4063
      break;
4064
 
4065
    case ABS:
4066
#if 0
4067
      /* Disabled to avoid exponential mutual recursion between nonzero_bits
4068
         and num_sign_bit_copies.  */
4069
      if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
4070
          == GET_MODE_PRECISION (GET_MODE (x)))
4071
        nonzero = 1;
4072
#endif
4073
      break;
4074
 
4075
    case TRUNCATE:
4076
      nonzero &= (cached_nonzero_bits (XEXP (x, 0), mode,
4077
                                       known_x, known_mode, known_ret)
4078
                  & GET_MODE_MASK (mode));
4079
      break;
4080
 
4081
    case ZERO_EXTEND:
4082
      nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
4083
                                      known_x, known_mode, known_ret);
4084
      if (GET_MODE (XEXP (x, 0)) != VOIDmode)
4085
        nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
4086
      break;
4087
 
4088
    case SIGN_EXTEND:
4089
      /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
4090
         Otherwise, show that all the bits in the outer mode but not in the
4091
         inner mode may be nonzero.  */
4092
      inner_nz = cached_nonzero_bits (XEXP (x, 0), mode,
4093
                                      known_x, known_mode, known_ret);
4094
      if (GET_MODE (XEXP (x, 0)) != VOIDmode)
4095
        {
4096
          inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
4097
          if (val_signbit_known_set_p (GET_MODE (XEXP (x, 0)), inner_nz))
4098
            inner_nz |= (GET_MODE_MASK (mode)
4099
                         & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
4100
        }
4101
 
4102
      nonzero &= inner_nz;
4103
      break;
4104
 
4105
    case AND:
4106
      nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
4107
                                       known_x, known_mode, known_ret)
4108
                 & cached_nonzero_bits (XEXP (x, 1), mode,
4109
                                        known_x, known_mode, known_ret);
4110
      break;
4111
 
4112
    case XOR:   case IOR:
4113
    case UMIN:  case UMAX:  case SMIN:  case SMAX:
4114
      {
4115
        unsigned HOST_WIDE_INT nonzero0
4116
           = cached_nonzero_bits (XEXP (x, 0), mode,
4117
                                  known_x, known_mode, known_ret);
4118
 
4119
        /* Don't call nonzero_bits for the second time if it cannot change
4120
           anything.  */
4121
        if ((nonzero & nonzero0) != nonzero)
4122
          nonzero &= nonzero0
4123
                     | cached_nonzero_bits (XEXP (x, 1), mode,
4124
                                            known_x, known_mode, known_ret);
4125
      }
4126
      break;
4127
 
4128
    case PLUS:  case MINUS:
4129
    case MULT:
4130
    case DIV:   case UDIV:
4131
    case MOD:   case UMOD:
4132
      /* We can apply the rules of arithmetic to compute the number of
4133
         high- and low-order zero bits of these operations.  We start by
4134
         computing the width (position of the highest-order nonzero bit)
4135
         and the number of low-order zero bits for each value.  */
4136
      {
4137
        unsigned HOST_WIDE_INT nz0
4138
          = cached_nonzero_bits (XEXP (x, 0), mode,
4139
                                 known_x, known_mode, known_ret);
4140
        unsigned HOST_WIDE_INT nz1
4141
          = cached_nonzero_bits (XEXP (x, 1), mode,
4142
                                 known_x, known_mode, known_ret);
4143
        int sign_index = GET_MODE_PRECISION (GET_MODE (x)) - 1;
4144
        int width0 = floor_log2 (nz0) + 1;
4145
        int width1 = floor_log2 (nz1) + 1;
4146
        int low0 = floor_log2 (nz0 & -nz0);
4147
        int low1 = floor_log2 (nz1 & -nz1);
4148
        unsigned HOST_WIDE_INT op0_maybe_minusp
4149
          = nz0 & ((unsigned HOST_WIDE_INT) 1 << sign_index);
4150
        unsigned HOST_WIDE_INT op1_maybe_minusp
4151
          = nz1 & ((unsigned HOST_WIDE_INT) 1 << sign_index);
4152
        unsigned int result_width = mode_width;
4153
        int result_low = 0;
4154
 
4155
        switch (code)
4156
          {
4157
          case PLUS:
4158
            result_width = MAX (width0, width1) + 1;
4159
            result_low = MIN (low0, low1);
4160
            break;
4161
          case MINUS:
4162
            result_low = MIN (low0, low1);
4163
            break;
4164
          case MULT:
4165
            result_width = width0 + width1;
4166
            result_low = low0 + low1;
4167
            break;
4168
          case DIV:
4169
            if (width1 == 0)
4170
              break;
4171
            if (!op0_maybe_minusp && !op1_maybe_minusp)
4172
              result_width = width0;
4173
            break;
4174
          case UDIV:
4175
            if (width1 == 0)
4176
              break;
4177
            result_width = width0;
4178
            break;
4179
          case MOD:
4180
            if (width1 == 0)
4181
              break;
4182
            if (!op0_maybe_minusp && !op1_maybe_minusp)
4183
              result_width = MIN (width0, width1);
4184
            result_low = MIN (low0, low1);
4185
            break;
4186
          case UMOD:
4187
            if (width1 == 0)
4188
              break;
4189
            result_width = MIN (width0, width1);
4190
            result_low = MIN (low0, low1);
4191
            break;
4192
          default:
4193
            gcc_unreachable ();
4194
          }
4195
 
4196
        if (result_width < mode_width)
4197
          nonzero &= ((unsigned HOST_WIDE_INT) 1 << result_width) - 1;
4198
 
4199
        if (result_low > 0)
4200
          nonzero &= ~(((unsigned HOST_WIDE_INT) 1 << result_low) - 1);
4201
      }
4202
      break;
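/* Illustrative, self-contained sketch of the width/low-bit bookkeeping above
   for PLUS: the sum of two values cannot be wider than the wider operand
   plus one carry bit, and it keeps any low-order zero bits common to both
   operands.  The helper names below are invented for the example.  */
#if 0
#include <stdint.h>
#include <assert.h>

static int
example_floor_log2 (uint64_t x)
{
  int n = -1;
  while (x != 0)
    {
      x >>= 1;
      n++;
    }
  return n;
}

static uint64_t
example_plus_nonzero (uint64_t nz0, uint64_t nz1, unsigned int mode_width)
{
  int width0 = example_floor_log2 (nz0) + 1;
  int width1 = example_floor_log2 (nz1) + 1;
  int low0 = example_floor_log2 (nz0 & -nz0);
  int low1 = example_floor_log2 (nz1 & -nz1);
  unsigned int result_width = (width0 > width1 ? width0 : width1) + 1;
  int result_low = low0 < low1 ? low0 : low1;
  uint64_t nonzero = ((uint64_t) 1 << mode_width) - 1;

  if (result_width < mode_width)
    nonzero &= ((uint64_t) 1 << result_width) - 1;
  if (result_low > 0)
    nonzero &= ~(((uint64_t) 1 << result_low) - 1);
  return nonzero;
}

int
main (void)
{
  /* Both operands are multiples of 4 that fit in 8 bits, so the sum fits
     in 9 bits and its two low bits stay zero.  */
  assert (example_plus_nonzero (0xfc, 0xfc, 32) == 0x1fc);
  return 0;
}
#endif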
4203
 
4204
    case ZERO_EXTRACT:
4205
      if (CONST_INT_P (XEXP (x, 1))
4206
          && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
4207
        nonzero &= ((unsigned HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
4208
      break;
4209
 
4210
    case SUBREG:
4211
      /* If this is a SUBREG formed for a promoted variable that has
4212
         been zero-extended, we know that at least the high-order bits
4213
         are zero, though others might be too.  */
4214
 
4215
      if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x) > 0)
4216
        nonzero = GET_MODE_MASK (GET_MODE (x))
4217
                  & cached_nonzero_bits (SUBREG_REG (x), GET_MODE (x),
4218
                                         known_x, known_mode, known_ret);
4219
 
4220
      inner_mode = GET_MODE (SUBREG_REG (x));
4221
      /* If the inner mode is a single word for both the host and target
4222
         machines, we can compute this from which bits of the inner
4223
         object might be nonzero.  */
4224
      if (GET_MODE_PRECISION (inner_mode) <= BITS_PER_WORD
4225
          && (GET_MODE_PRECISION (inner_mode) <= HOST_BITS_PER_WIDE_INT))
4226
        {
4227
          nonzero &= cached_nonzero_bits (SUBREG_REG (x), mode,
4228
                                          known_x, known_mode, known_ret);
4229
 
4230
#if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP)
4231
          /* If this is a typical RISC machine, we only have to worry
4232
             about the way loads are extended.  */
4233
          if ((LOAD_EXTEND_OP (inner_mode) == SIGN_EXTEND
4234
               ? val_signbit_known_set_p (inner_mode, nonzero)
4235
               : LOAD_EXTEND_OP (inner_mode) != ZERO_EXTEND)
4236
              || !MEM_P (SUBREG_REG (x)))
4237
#endif
4238
            {
4239
              /* On many CISC machines, accessing an object in a wider mode
4240
                 causes the high-order bits to become undefined.  So they are
4241
                 not known to be zero.  */
4242
              if (GET_MODE_PRECISION (GET_MODE (x))
4243
                  > GET_MODE_PRECISION (inner_mode))
4244
                nonzero |= (GET_MODE_MASK (GET_MODE (x))
4245
                            & ~GET_MODE_MASK (inner_mode));
4246
            }
4247
        }
4248
      break;
4249
 
4250
    case ASHIFTRT:
4251
    case LSHIFTRT:
4252
    case ASHIFT:
4253
    case ROTATE:
4254
      /* The nonzero bits are in two classes: any bits within MODE
4255
         that aren't in GET_MODE (x) are always significant.  The rest of the
4256
         nonzero bits are those that are significant in the operand of
4257
         the shift when shifted the appropriate number of bits.  This
4258
         shows that high-order bits are cleared by the right shift and
4259
         low-order bits by left shifts.  */
4260
      if (CONST_INT_P (XEXP (x, 1))
4261
          && INTVAL (XEXP (x, 1)) >= 0
4262
          && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
4263
          && INTVAL (XEXP (x, 1)) < GET_MODE_PRECISION (GET_MODE (x)))
4264
        {
4265
          enum machine_mode inner_mode = GET_MODE (x);
4266
          unsigned int width = GET_MODE_PRECISION (inner_mode);
4267
          int count = INTVAL (XEXP (x, 1));
4268
          unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
4269
          unsigned HOST_WIDE_INT op_nonzero
4270
            = cached_nonzero_bits (XEXP (x, 0), mode,
4271
                                   known_x, known_mode, known_ret);
4272
          unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
4273
          unsigned HOST_WIDE_INT outer = 0;
4274
 
4275
          if (mode_width > width)
4276
            outer = (op_nonzero & nonzero & ~mode_mask);
4277
 
4278
          if (code == LSHIFTRT)
4279
            inner >>= count;
4280
          else if (code == ASHIFTRT)
4281
            {
4282
              inner >>= count;
4283
 
4284
              /* If the sign bit may have been nonzero before the shift, we
4285
                 need to mark all the places it could have been copied to
4286
                 by the shift as possibly nonzero.  */
4287
              if (inner & ((unsigned HOST_WIDE_INT) 1 << (width - 1 - count)))
4288
                inner |= (((unsigned HOST_WIDE_INT) 1 << count) - 1)
4289
                           << (width - count);
4290
            }
4291
          else if (code == ASHIFT)
4292
            inner <<= count;
4293
          else
4294
            inner = ((inner << (count % width)
4295
                      | (inner >> (width - (count % width)))) & mode_mask);
4296
 
4297
          nonzero &= (outer | inner);
4298
        }
4299
      break;
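/* A worked example of the ASHIFTRT handling above, as a self-contained
   sketch: an 8-bit value whose known nonzero mask is 0xF0, shifted right
   arithmetically by 4.  The surviving bits land in the low nibble, and
   because the sign bit might have been set, every vacated high bit must be
   treated as possibly nonzero as well.  */
#if 0
#include <assert.h>

int
main (void)
{
  unsigned int width = 8, count = 4;
  unsigned int inner = 0xF0 >> count;                     /* 0x0F */
  /* The old sign bit now sits at position width - 1 - count.  */
  if (inner & (1u << (width - 1 - count)))
    inner |= ((1u << count) - 1) << (width - count);      /* 0xF0 */
  assert (inner == 0xFF);
  return 0;
}
#endif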
4300
 
4301
    case FFS:
4302
    case POPCOUNT:
4303
      /* This is at most the number of bits in the mode.  */
4304
      nonzero = ((unsigned HOST_WIDE_INT) 2 << (floor_log2 (mode_width))) - 1;
4305
      break;
4306
 
4307
    case CLZ:
4308
      /* If CLZ has a known value at zero, then the nonzero bits are
4309
         that value, plus the number of bits in the mode minus one.  */
4310
      if (CLZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
4311
        nonzero
4312
          |= ((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
4313
      else
4314
        nonzero = -1;
4315
      break;
4316
 
4317
    case CTZ:
4318
      /* If CTZ has a known value at zero, then the nonzero bits are
4319
         that value, plus the number of bits in the mode minus one.  */
4320
      if (CTZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
4321
        nonzero
4322
          |= ((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
4323
      else
4324
        nonzero = -1;
4325
      break;
4326
 
4327
    case CLRSB:
4328
      /* This is at most the number of bits in the mode minus 1.  */
4329
      nonzero = ((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
4330
      break;
4331
 
4332
    case PARITY:
4333
      nonzero = 1;
4334
      break;
4335
 
4336
    case IF_THEN_ELSE:
4337
      {
4338
        unsigned HOST_WIDE_INT nonzero_true
4339
          = cached_nonzero_bits (XEXP (x, 1), mode,
4340
                                 known_x, known_mode, known_ret);
4341
 
4342
        /* Don't call nonzero_bits for the second time if it cannot change
4343
           anything.  */
4344
        if ((nonzero & nonzero_true) != nonzero)
4345
          nonzero &= nonzero_true
4346
                     | cached_nonzero_bits (XEXP (x, 2), mode,
4347
                                            known_x, known_mode, known_ret);
4348
      }
4349
      break;
4350
 
4351
    default:
4352
      break;
4353
    }
4354
 
4355
  return nonzero;
4356
}
4357
 
4358
/* See the macro definition above.  */
4359
#undef cached_num_sign_bit_copies
4360
 
4361
 
4362
/* The function cached_num_sign_bit_copies is a wrapper around
4363
   num_sign_bit_copies1.  It avoids exponential behavior in
4364
   num_sign_bit_copies1 when X has identical subexpressions on the
4365
   first or the second level.  */
4366
 
4367
static unsigned int
4368
cached_num_sign_bit_copies (const_rtx x, enum machine_mode mode, const_rtx known_x,
4369
                            enum machine_mode known_mode,
4370
                            unsigned int known_ret)
4371
{
4372
  if (x == known_x && mode == known_mode)
4373
    return known_ret;
4374
 
4375
  /* Try to find identical subexpressions.  If found call
4376
     num_sign_bit_copies1 on X with the subexpressions as KNOWN_X and
4377
     the precomputed value for the subexpression as KNOWN_RET.  */
4378
 
4379
  if (ARITHMETIC_P (x))
4380
    {
4381
      rtx x0 = XEXP (x, 0);
4382
      rtx x1 = XEXP (x, 1);
4383
 
4384
      /* Check the first level.  */
4385
      if (x0 == x1)
4386
        return
4387
          num_sign_bit_copies1 (x, mode, x0, mode,
4388
                                cached_num_sign_bit_copies (x0, mode, known_x,
4389
                                                            known_mode,
4390
                                                            known_ret));
4391
 
4392
      /* Check the second level.  */
4393
      if (ARITHMETIC_P (x0)
4394
          && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
4395
        return
4396
          num_sign_bit_copies1 (x, mode, x1, mode,
4397
                                cached_num_sign_bit_copies (x1, mode, known_x,
4398
                                                            known_mode,
4399
                                                            known_ret));
4400
 
4401
      if (ARITHMETIC_P (x1)
4402
          && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
4403
        return
4404
          num_sign_bit_copies1 (x, mode, x0, mode,
4405
                                cached_num_sign_bit_copies (x0, mode, known_x,
4406
                                                            known_mode,
4407
                                                            known_ret));
4408
    }
4409
 
4410
  return num_sign_bit_copies1 (x, mode, known_x, known_mode, known_ret);
4411
}
4412
 
4413
/* Return the number of bits at the high-order end of X that are known to
4414
   be equal to the sign bit.  X will be used in mode MODE; if MODE is
4415
   VOIDmode, X will be used in its own mode.  The returned value will always
4416
   be between 1 and the number of bits in MODE.  */
4417
 
4418
static unsigned int
4419
num_sign_bit_copies1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
4420
                      enum machine_mode known_mode,
4421
                      unsigned int known_ret)
4422
{
4423
  enum rtx_code code = GET_CODE (x);
4424
  unsigned int bitwidth = GET_MODE_PRECISION (mode);
4425
  int num0, num1, result;
4426
  unsigned HOST_WIDE_INT nonzero;
4427
 
4428
  /* If we weren't given a mode, use the mode of X.  If the mode is still
4429
     VOIDmode, we don't know anything.  Likewise if one of the modes is
4430
     floating-point.  */
4431
 
4432
  if (mode == VOIDmode)
4433
    mode = GET_MODE (x);
4434
 
4435
  if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x))
4436
      || VECTOR_MODE_P (GET_MODE (x)) || VECTOR_MODE_P (mode))
4437
    return 1;
4438
 
4439
  /* For a smaller object, just ignore the high bits.  */
4440
  if (bitwidth < GET_MODE_PRECISION (GET_MODE (x)))
4441
    {
4442
      num0 = cached_num_sign_bit_copies (x, GET_MODE (x),
4443
                                         known_x, known_mode, known_ret);
4444
      return MAX (1,
4445
                  num0 - (int) (GET_MODE_PRECISION (GET_MODE (x)) - bitwidth));
4446
    }
4447
 
4448
  if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_PRECISION (GET_MODE (x)))
4449
    {
4450
#ifndef WORD_REGISTER_OPERATIONS
4451
      /* If this machine does not do all register operations on the entire
4452
         register and MODE is wider than the mode of X, we can say nothing
4453
         at all about the high-order bits.  */
4454
      return 1;
4455
#else
4456
      /* Likewise on machines that do, if the mode of the object is smaller
4457
         than a word and loads of that size don't sign extend, we can say
4458
         nothing about the high order bits.  */
4459
      if (GET_MODE_PRECISION (GET_MODE (x)) < BITS_PER_WORD
4460
#ifdef LOAD_EXTEND_OP
4461
          && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
4462
#endif
4463
          )
4464
        return 1;
4465
#endif
4466
    }
4467
 
4468
  switch (code)
4469
    {
4470
    case REG:
4471
 
4472
#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
4473
      /* If pointers extend signed and this is a pointer in Pmode, say that
4474
         all the bits above ptr_mode are known to be sign bit copies.  */
4475
      /* As we do not know which address space the pointer is referring to,
4476
         we can do this only if the target does not support different pointer
4477
         or address modes depending on the address space.  */
4478
      if (target_default_pointer_address_modes_p ()
4479
          && ! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
4480
          && mode == Pmode && REG_POINTER (x))
4481
        return GET_MODE_PRECISION (Pmode) - GET_MODE_PRECISION (ptr_mode) + 1;
4482
#endif
4483
 
4484
      {
4485
        unsigned int copies_for_hook = 1, copies = 1;
4486
        rtx new_rtx = rtl_hooks.reg_num_sign_bit_copies (x, mode, known_x,
4487
                                                     known_mode, known_ret,
4488
                                                     &copies_for_hook);
4489
 
4490
        if (new_rtx)
4491
          copies = cached_num_sign_bit_copies (new_rtx, mode, known_x,
4492
                                               known_mode, known_ret);
4493
 
4494
        if (copies > 1 || copies_for_hook > 1)
4495
          return MAX (copies, copies_for_hook);
4496
 
4497
        /* Else, use nonzero_bits to guess num_sign_bit_copies (see below).  */
4498
      }
4499
      break;
4500
 
4501
    case MEM:
4502
#ifdef LOAD_EXTEND_OP
4503
      /* Some RISC machines sign-extend all loads of smaller than a word.  */
4504
      if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
4505
        return MAX (1, ((int) bitwidth
4506
                        - (int) GET_MODE_PRECISION (GET_MODE (x)) + 1));
4507
#endif
4508
      break;
4509
 
4510
    case CONST_INT:
4511
      /* If the constant is negative, take its 1's complement and remask.
4512
         Then see how many zero bits we have.  */
4513
      nonzero = UINTVAL (x) & GET_MODE_MASK (mode);
4514
      if (bitwidth <= HOST_BITS_PER_WIDE_INT
4515
          && (nonzero & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4516
        nonzero = (~nonzero) & GET_MODE_MASK (mode);
4517
 
4518
      return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
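/* A worked example of the CONST_INT rule above, as a self-contained sketch:
   in a 16-bit mode, the constant 0xFFF0 (-16) has complement 0x000F, so
   floor_log2 gives 3 and the constant has 16 - 3 - 1 = 12 sign bit copies
   (bits 15 down to 4 all equal the sign bit).  */
#if 0
#include <assert.h>

static int
example_floor_log2 (unsigned int x)
{
  int n = -1;
  while (x != 0)
    {
      x >>= 1;
      n++;
    }
  return n;
}

int
main (void)
{
  unsigned int bitwidth = 16, mask = 0xFFFF;
  unsigned int value = 0xFFF0 & mask;
  if (value & (1u << (bitwidth - 1)))
    value = ~value & mask;                     /* 0x000F */
  assert (value != 0 && bitwidth - example_floor_log2 (value) - 1 == 12);
  return 0;
}
#endif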
4519
 
4520
    case SUBREG:
4521
      /* If this is a SUBREG for a promoted object that is sign-extended
4522
         and we are looking at it in a wider mode, we know that at least the
4523
         high-order bits are known to be sign bit copies.  */
4524
 
4525
      if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
4526
        {
4527
          num0 = cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4528
                                             known_x, known_mode, known_ret);
4529
          return MAX ((int) bitwidth
4530
                      - (int) GET_MODE_PRECISION (GET_MODE (x)) + 1,
4531
                      num0);
4532
        }
4533
 
4534
      /* For a smaller object, just ignore the high bits.  */
4535
      if (bitwidth <= GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))))
4536
        {
4537
          num0 = cached_num_sign_bit_copies (SUBREG_REG (x), VOIDmode,
4538
                                             known_x, known_mode, known_ret);
4539
          return MAX (1, (num0
4540
                          - (int) (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x)))
4541
                                   - bitwidth)));
4542
        }
4543
 
4544
#ifdef WORD_REGISTER_OPERATIONS
4545
#ifdef LOAD_EXTEND_OP
4546
      /* For paradoxical SUBREGs on machines where all register operations
4547
         affect the entire register, just look inside.  Note that we are
4548
         passing MODE to the recursive call, so the number of sign bit copies
4549
         will remain relative to that mode, not the inner mode.  */
4550
 
4551
      /* This works only if loads sign extend.  Otherwise, if we get a
4552
         reload for the inner part, it may be loaded from the stack, and
4553
         then we lose all sign bit copies that existed before the store
4554
         to the stack.  */
4555
 
4556
      if (paradoxical_subreg_p (x)
4557
          && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
4558
          && MEM_P (SUBREG_REG (x)))
4559
        return cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4560
                                           known_x, known_mode, known_ret);
4561
#endif
4562
#endif
4563
      break;
4564
 
4565
    case SIGN_EXTRACT:
4566
      if (CONST_INT_P (XEXP (x, 1)))
4567
        return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1)));
4568
      break;
4569
 
4570
    case SIGN_EXTEND:
4571
      return (bitwidth - GET_MODE_PRECISION (GET_MODE (XEXP (x, 0)))
4572
              + cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4573
                                            known_x, known_mode, known_ret));
4574
 
4575
    case TRUNCATE:
4576
      /* For a smaller object, just ignore the high bits.  */
4577
      num0 = cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4578
                                         known_x, known_mode, known_ret);
4579
      return MAX (1, (num0 - (int) (GET_MODE_PRECISION (GET_MODE (XEXP (x, 0)))
4580
                                    - bitwidth)));
4581
 
4582
    case NOT:
4583
      return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4584
                                         known_x, known_mode, known_ret);
4585
 
4586
    case ROTATE:       case ROTATERT:
4587
      /* If we are rotating left by a number of bits less than the number
4588
         of sign bit copies, we can just subtract that amount from the
4589
         number.  */
4590
      if (CONST_INT_P (XEXP (x, 1))
4591
          && INTVAL (XEXP (x, 1)) >= 0
4592
          && INTVAL (XEXP (x, 1)) < (int) bitwidth)
4593
        {
4594
          num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4595
                                             known_x, known_mode, known_ret);
4596
          return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
4597
                                 : (int) bitwidth - INTVAL (XEXP (x, 1))));
4598
        }
4599
      break;
4600
 
4601
    case NEG:
4602
      /* In general, this subtracts one sign bit copy.  But if the value
4603
         is known to be positive, the number of sign bit copies is the
4604
         same as that of the input.  Finally, if the input has just one bit
4605
         that might be nonzero, all the bits are copies of the sign bit.  */
4606
      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4607
                                         known_x, known_mode, known_ret);
4608
      if (bitwidth > HOST_BITS_PER_WIDE_INT)
4609
        return num0 > 1 ? num0 - 1 : 1;
4610
 
4611
      nonzero = nonzero_bits (XEXP (x, 0), mode);
4612
      if (nonzero == 1)
4613
        return bitwidth;
4614
 
4615
      if (num0 > 1
4616
          && (((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
4617
        num0--;
4618
 
4619
      return num0;
4620
 
4621
    case IOR:   case AND:   case XOR:
4622
    case SMIN:  case SMAX:  case UMIN:  case UMAX:
4623
      /* Logical operations will preserve the number of sign-bit copies.
4624
         MIN and MAX operations always return one of the operands.  */
4625
      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4626
                                         known_x, known_mode, known_ret);
4627
      num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4628
                                         known_x, known_mode, known_ret);
4629
 
4630
      /* If num1 is clearing some of the top bits then regardless of
4631
         the other term, we are guaranteed to have at least that many
4632
         high-order zero bits.  */
4633
      if (code == AND
4634
          && num1 > 1
4635
          && bitwidth <= HOST_BITS_PER_WIDE_INT
4636
          && CONST_INT_P (XEXP (x, 1))
4637
          && (UINTVAL (XEXP (x, 1))
4638
              & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) == 0)
4639
        return num1;
4640
 
4641
      /* Similarly for IOR when setting high-order bits.  */
4642
      if (code == IOR
4643
          && num1 > 1
4644
          && bitwidth <= HOST_BITS_PER_WIDE_INT
4645
          && CONST_INT_P (XEXP (x, 1))
4646
          && (UINTVAL (XEXP (x, 1))
4647
              & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4648
        return num1;
4649
 
4650
      return MIN (num0, num1);
4651
 
4652
    case PLUS:  case MINUS:
4653
      /* For addition and subtraction, we can have a 1-bit carry.  However,
4654
         if we are subtracting 1 from a positive number, there will not
4655
         be such a carry.  Furthermore, if the positive number is known to
4656
         be 0 or 1, we know the result is either -1 or 0.  */
4657
 
4658
      if (code == PLUS && XEXP (x, 1) == constm1_rtx
4659
          && bitwidth <= HOST_BITS_PER_WIDE_INT)
4660
        {
4661
          nonzero = nonzero_bits (XEXP (x, 0), mode);
4662
          if ((((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
4663
            return (nonzero == 1 || nonzero == 0 ? bitwidth
4664
                    : bitwidth - floor_log2 (nonzero) - 1);
4665
        }
4666
 
4667
      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4668
                                         known_x, known_mode, known_ret);
4669
      num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4670
                                         known_x, known_mode, known_ret);
4671
      result = MAX (1, MIN (num0, num1) - 1);
4672
 
4673
      return result;
4674
 
4675
    case MULT:
4676
      /* The number of bits of the product is the sum of the number of
4677
         bits of both terms.  However, unless one of the terms is known
4678
         to be positive, we must allow for an additional bit since negating
4679
         a negative number can remove one sign bit copy.  */
4680
 
4681
      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4682
                                         known_x, known_mode, known_ret);
4683
      num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4684
                                         known_x, known_mode, known_ret);
4685
 
4686
      result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
4687
      if (result > 0
4688
          && (bitwidth > HOST_BITS_PER_WIDE_INT
4689
              || (((nonzero_bits (XEXP (x, 0), mode)
4690
                    & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4691
                  && ((nonzero_bits (XEXP (x, 1), mode)
4692
                       & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1)))
4693
                      != 0))))
4694
        result--;
4695
 
4696
      return MAX (1, result);
4697
 
4698
    case UDIV:
4699
      /* The result must be <= the first operand.  If the first operand
4700
         has the high bit set, we know nothing about the number of sign
4701
         bit copies.  */
4702
      if (bitwidth > HOST_BITS_PER_WIDE_INT)
4703
        return 1;
4704
      else if ((nonzero_bits (XEXP (x, 0), mode)
4705
                & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4706
        return 1;
4707
      else
4708
        return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4709
                                           known_x, known_mode, known_ret);
4710
 
4711
    case UMOD:
4712
      /* The result must be <= the second operand.  If the second operand
4713
         has (or just might have) the high bit set, we know nothing about
4714
         the number of sign bit copies.  */
4715
      if (bitwidth > HOST_BITS_PER_WIDE_INT)
4716
        return 1;
4717
      else if ((nonzero_bits (XEXP (x, 1), mode)
4718
                & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4719
        return 1;
4720
      else
4721
        return cached_num_sign_bit_copies (XEXP (x, 1), mode,
4722
                                           known_x, known_mode, known_ret);
4723
 
4724
    case DIV:
4725
      /* Similar to unsigned division, except that we have to worry about
4726
         the case where the divisor is negative, in which case we have
4727
         to add 1.  */
4728
      result = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4729
                                           known_x, known_mode, known_ret);
4730
      if (result > 1
4731
          && (bitwidth > HOST_BITS_PER_WIDE_INT
4732
              || (nonzero_bits (XEXP (x, 1), mode)
4733
                  & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
4734
        result--;
4735
 
4736
      return result;
4737
 
4738
    case MOD:
4739
      result = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4740
                                           known_x, known_mode, known_ret);
4741
      if (result > 1
4742
          && (bitwidth > HOST_BITS_PER_WIDE_INT
4743
              || (nonzero_bits (XEXP (x, 1), mode)
4744
                  & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
4745
        result--;
4746
 
4747
      return result;
4748
 
4749
    case ASHIFTRT:
4750
      /* Shifts by a constant add to the number of bits equal to the
4751
         sign bit.  */
4752
      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4753
                                         known_x, known_mode, known_ret);
4754
      if (CONST_INT_P (XEXP (x, 1))
4755
          && INTVAL (XEXP (x, 1)) > 0
4756
          && INTVAL (XEXP (x, 1)) < GET_MODE_PRECISION (GET_MODE (x)))
4757
        num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1)));
4758
 
4759
      return num0;
4760
 
4761
    case ASHIFT:
4762
      /* Left shifts destroy copies.  */
4763
      if (!CONST_INT_P (XEXP (x, 1))
4764
          || INTVAL (XEXP (x, 1)) < 0
4765
          || INTVAL (XEXP (x, 1)) >= (int) bitwidth
4766
          || INTVAL (XEXP (x, 1)) >= GET_MODE_PRECISION (GET_MODE (x)))
4767
        return 1;
4768
 
4769
      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4770
                                         known_x, known_mode, known_ret);
4771
      return MAX (1, num0 - INTVAL (XEXP (x, 1)));
4772
 
4773
    case IF_THEN_ELSE:
4774
      num0 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4775
                                         known_x, known_mode, known_ret);
4776
      num1 = cached_num_sign_bit_copies (XEXP (x, 2), mode,
4777
                                         known_x, known_mode, known_ret);
4778
      return MIN (num0, num1);
4779
 
4780
    case EQ:  case NE:  case GE:  case GT:  case LE:  case LT:
4781
    case UNEQ:  case LTGT:  case UNGE:  case UNGT:  case UNLE:  case UNLT:
4782
    case GEU: case GTU: case LEU: case LTU:
4783
    case UNORDERED: case ORDERED:
4784
      /* If the constant is negative, take its 1's complement and remask.
4785
         Then see how many zero bits we have.  */
4786
      nonzero = STORE_FLAG_VALUE;
4787
      if (bitwidth <= HOST_BITS_PER_WIDE_INT
4788
          && (nonzero & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4789
        nonzero = (~nonzero) & GET_MODE_MASK (mode);
4790
 
4791
      return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
4792
 
4793
    default:
4794
      break;
4795
    }
4796
 
4797
  /* If we haven't been able to figure it out by one of the above rules,
4798
     see if some of the high-order bits are known to be zero.  If so,
4799
     count those bits and return one less than that amount.  If we can't
4800
     safely compute the mask for this mode, always return BITWIDTH.  */
4801
 
4802
  bitwidth = GET_MODE_PRECISION (mode);
4803
  if (bitwidth > HOST_BITS_PER_WIDE_INT)
4804
    return 1;
4805
 
4806
  nonzero = nonzero_bits (x, mode);
4807
  return nonzero & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))
4808
         ? 1 : bitwidth - floor_log2 (nonzero) - 1;
4809
}
4810
 
4811
/* Calculate the rtx_cost of a single instruction.  A return value of
4812
   zero indicates an instruction pattern without a known cost.  */
4813
 
4814
int
4815
insn_rtx_cost (rtx pat, bool speed)
4816
{
4817
  int i, cost;
4818
  rtx set;
4819
 
4820
  /* Extract the single set rtx from the instruction pattern.
4821
     We can't use single_set since we only have the pattern.  */
4822
  if (GET_CODE (pat) == SET)
4823
    set = pat;
4824
  else if (GET_CODE (pat) == PARALLEL)
4825
    {
4826
      set = NULL_RTX;
4827
      for (i = 0; i < XVECLEN (pat, 0); i++)
4828
        {
4829
          rtx x = XVECEXP (pat, 0, i);
4830
          if (GET_CODE (x) == SET)
4831
            {
4832
              if (set)
4833
                return 0;
4834
              set = x;
4835
            }
4836
        }
4837
      if (!set)
4838
        return 0;
4839
    }
4840
  else
4841
    return 0;
4842
 
4843
  cost = set_src_cost (SET_SRC (set), speed);
4844
  return cost > 0 ? cost : COSTS_N_INSNS (1);
4845
}
4846
 
4847
/* Given an insn INSN and condition COND, return the condition in a
4848
   canonical form to simplify testing by callers.  Specifically:
4849
 
4850
   (1) The code will always be a comparison operation (EQ, NE, GT, etc.).
4851
   (2) Both operands will be machine operands; (cc0) will have been replaced.
4852
   (3) If an operand is a constant, it will be the second operand.
4853
   (4) (LE x const) will be replaced with (LT x <const+1>) and similarly
4854
       for GE, GEU, and LEU.
4855
 
4856
   If the condition cannot be understood, or is an inequality floating-point
4857
   comparison which needs to be reversed, 0 will be returned.
4858
 
4859
   If REVERSE is nonzero, then reverse the condition prior to canonicalizing it.
4860
 
4861
   If EARLIEST is nonzero, it is a pointer to a place where the earliest
4862
   insn used in locating the condition was found.  If a replacement test
4863
   of the condition is desired, it should be placed in front of that
4864
   insn and we will be sure that the inputs are still valid.
4865
 
4866
   If WANT_REG is nonzero, we wish the condition to be relative to that
4867
   register, if possible.  Therefore, do not canonicalize the condition
4868
   further.  If ALLOW_CC_MODE is nonzero, allow the condition returned
4869
   to be a compare to a CC mode register.
4870
 
4871
   If VALID_AT_INSN_P, the condition must be valid at both *EARLIEST
4872
   and at INSN.  */
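/* A quick self-contained check of rule (4) above: for integers away from the
   type's extremes, x <= c is equivalent to x < c + 1, which is why LE (and
   similarly GE, GEU and LEU) can be rewritten against an adjusted
   constant.  */
#if 0
#include <assert.h>

int
main (void)
{
  for (int x = -5; x <= 5; x++)
    for (int c = -3; c <= 3; c++)
      assert ((x <= c) == (x < c + 1));
  return 0;
}
#endif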
4873
 
4874
rtx
canonicalize_condition (rtx insn, rtx cond, int reverse, rtx *earliest,
                        rtx want_reg, int allow_cc_mode, int valid_at_insn_p)
{
  enum rtx_code code;
  rtx prev = insn;
  const_rtx set;
  rtx tem;
  rtx op0, op1;
  int reverse_code = 0;
  enum machine_mode mode;
  basic_block bb = BLOCK_FOR_INSN (insn);

  code = GET_CODE (cond);
  mode = GET_MODE (cond);
  op0 = XEXP (cond, 0);
  op1 = XEXP (cond, 1);

  if (reverse)
    code = reversed_comparison_code (cond, insn);
  if (code == UNKNOWN)
    return 0;

  if (earliest)
    *earliest = insn;

  /* If we are comparing a register with zero, see if the register is set
     in the previous insn to a COMPARE or a comparison operation.  Perform
     the same tests as a function of STORE_FLAG_VALUE as find_comparison_args
     in cse.c.  */

  while ((GET_RTX_CLASS (code) == RTX_COMPARE
          || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
         && op1 == CONST0_RTX (GET_MODE (op0))
         && op0 != want_reg)
    {
      /* Set to nonzero when we find something of interest.  */
      rtx x = 0;

#ifdef HAVE_cc0
      /* If comparison with cc0, import actual comparison from compare
         insn.  */
      if (op0 == cc0_rtx)
        {
          if ((prev = prev_nonnote_insn (prev)) == 0
              || !NONJUMP_INSN_P (prev)
              || (set = single_set (prev)) == 0
              || SET_DEST (set) != cc0_rtx)
            return 0;

          op0 = SET_SRC (set);
          op1 = CONST0_RTX (GET_MODE (op0));
          if (earliest)
            *earliest = prev;
        }
#endif

      /* If this is a COMPARE, pick up the two things being compared.  */
      if (GET_CODE (op0) == COMPARE)
        {
          op1 = XEXP (op0, 1);
          op0 = XEXP (op0, 0);
          continue;
        }
      else if (!REG_P (op0))
        break;

      /* Go back to the previous insn.  Stop if it is not an INSN.  We also
         stop if it isn't a single set or if it has a REG_INC note because
         we don't want to bother dealing with it.  */

      prev = prev_nonnote_nondebug_insn (prev);

      if (prev == 0
          || !NONJUMP_INSN_P (prev)
          || FIND_REG_INC_NOTE (prev, NULL_RTX)
          /* In cfglayout mode, there do not have to be labels at the
             beginning of a block, or jumps at the end, so the previous
             conditions would not stop us when we reach the bb boundary.  */
          || BLOCK_FOR_INSN (prev) != bb)
        break;

      set = set_of (op0, prev);

      if (set
          && (GET_CODE (set) != SET
              || !rtx_equal_p (SET_DEST (set), op0)))
        break;

      /* If this is setting OP0, get what it sets it to if it looks
         relevant.  */
      if (set)
        {
          enum machine_mode inner_mode = GET_MODE (SET_DEST (set));
#ifdef FLOAT_STORE_FLAG_VALUE
          REAL_VALUE_TYPE fsfv;
#endif

          /* ??? We may not combine comparisons done in a CCmode with
             comparisons not done in a CCmode.  This is to aid targets
             like Alpha that have an IEEE compliant EQ instruction, and
             a non-IEEE compliant BEQ instruction.  The use of CCmode is
             actually artificial, simply to prevent the combination, but
             should not affect other platforms.

             However, we must allow VOIDmode comparisons to match either
             CCmode or non-CCmode comparison, because some ports have
             modeless comparisons inside branch patterns.

             ??? This mode check should perhaps look more like the mode check
             in simplify_comparison in combine.  */

          if ((GET_CODE (SET_SRC (set)) == COMPARE
               || (((code == NE
                     || (code == LT
                         && val_signbit_known_set_p (inner_mode,
                                                     STORE_FLAG_VALUE))
#ifdef FLOAT_STORE_FLAG_VALUE
                     || (code == LT
                         && SCALAR_FLOAT_MODE_P (inner_mode)
                         && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
                             REAL_VALUE_NEGATIVE (fsfv)))
#endif
                     ))
                   && COMPARISON_P (SET_SRC (set))))
              && (((GET_MODE_CLASS (mode) == MODE_CC)
                   == (GET_MODE_CLASS (inner_mode) == MODE_CC))
                  || mode == VOIDmode || inner_mode == VOIDmode))
            x = SET_SRC (set);
          else if (((code == EQ
                     || (code == GE
                         && val_signbit_known_set_p (inner_mode,
                                                     STORE_FLAG_VALUE))
#ifdef FLOAT_STORE_FLAG_VALUE
                     || (code == GE
                         && SCALAR_FLOAT_MODE_P (inner_mode)
                         && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
                             REAL_VALUE_NEGATIVE (fsfv)))
#endif
                     ))
                   && COMPARISON_P (SET_SRC (set))
                   && (((GET_MODE_CLASS (mode) == MODE_CC)
                        == (GET_MODE_CLASS (inner_mode) == MODE_CC))
                       || mode == VOIDmode || inner_mode == VOIDmode))
            {
              reverse_code = 1;
              x = SET_SRC (set);
            }
          else
            break;
        }

      else if (reg_set_p (op0, prev))
        /* If this sets OP0, but not directly, we have to give up.  */
        break;

      if (x)
        {
          /* If the caller is expecting the condition to be valid at INSN,
             make sure X doesn't change before INSN.  */
          if (valid_at_insn_p)
            if (modified_in_p (x, prev) || modified_between_p (x, prev, insn))
              break;
          if (COMPARISON_P (x))
            code = GET_CODE (x);
          if (reverse_code)
            {
              code = reversed_comparison_code (x, prev);
              if (code == UNKNOWN)
                return 0;
              reverse_code = 0;
            }

          op0 = XEXP (x, 0), op1 = XEXP (x, 1);
          if (earliest)
            *earliest = prev;
        }
    }

  /* If constant is first, put it last.  */
  if (CONSTANT_P (op0))
    code = swap_condition (code), tem = op0, op0 = op1, op1 = tem;

  /* If OP0 is the result of a comparison, we weren't able to find what
     was really being compared, so fail.  */
  if (!allow_cc_mode
      && GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC)
    return 0;

  /* Canonicalize any ordered comparison with an integer constant that
     involves equality (LE, GE, LEU, GEU) into the corresponding strict
     comparison, if we can do computations in the relevant mode and we
     do not overflow.  */

  if (GET_MODE_CLASS (GET_MODE (op0)) != MODE_CC
      && CONST_INT_P (op1)
      && GET_MODE (op0) != VOIDmode
      && GET_MODE_PRECISION (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      HOST_WIDE_INT const_val = INTVAL (op1);
      unsigned HOST_WIDE_INT uconst_val = const_val;
      unsigned HOST_WIDE_INT max_val
        = (unsigned HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (op0));

      switch (code)
        {
        case LE:
          if ((unsigned HOST_WIDE_INT) const_val != max_val >> 1)
            code = LT, op1 = gen_int_mode (const_val + 1, GET_MODE (op0));
          break;

        /* When cross-compiling, const_val might be sign-extended from
           BITS_PER_WORD to HOST_BITS_PER_WIDE_INT.  */
        case GE:
          if ((const_val & max_val)
              != ((unsigned HOST_WIDE_INT) 1
                  << (GET_MODE_PRECISION (GET_MODE (op0)) - 1)))
            code = GT, op1 = gen_int_mode (const_val - 1, GET_MODE (op0));
          break;

        case LEU:
          if (uconst_val < max_val)
            code = LTU, op1 = gen_int_mode (uconst_val + 1, GET_MODE (op0));
          break;

        case GEU:
          if (uconst_val != 0)
            code = GTU, op1 = gen_int_mode (uconst_val - 1, GET_MODE (op0));
          break;

        default:
          break;
        }
    }

  /* Never return CC0; return zero instead.  */
  if (CC0_P (op0))
    return 0;

  return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
}

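/* As an illustration of rule (4) in the comment above
   canonicalize_condition: with a 32-bit operand, (le x (const_int 5))
   becomes (lt x (const_int 6)) and (geu x (const_int 7)) becomes
   (gtu x (const_int 6)), while (le x (const_int 0x7fffffff)) is left
   alone because adding 1 would overflow the mode.  */
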
/* Given a jump insn JUMP, return the condition that will cause it to branch
   to its JUMP_LABEL.  If the condition cannot be understood, or is an
   inequality floating-point comparison which needs to be reversed, 0 will
   be returned.

   If EARLIEST is nonzero, it is a pointer to a place where the earliest
   insn used in locating the condition was found.  If a replacement test
   of the condition is desired, it should be placed in front of that
   insn and we will be sure that the inputs are still valid.  If EARLIEST
   is null, the returned condition will be valid at INSN.

   If ALLOW_CC_MODE is nonzero, allow the condition returned to be a
   compare to a CC mode register.

   VALID_AT_INSN_P is the same as for canonicalize_condition.  */

rtx
get_condition (rtx jump, rtx *earliest, int allow_cc_mode, int valid_at_insn_p)
{
  rtx cond;
  int reverse;
  rtx set;

  /* If this is not a standard conditional jump, we can't parse it.  */
  if (!JUMP_P (jump)
      || ! any_condjump_p (jump))
    return 0;
  set = pc_set (jump);

  cond = XEXP (SET_SRC (set), 0);

  /* If this branches to JUMP_LABEL when the condition is false, reverse
     the condition.  */
  reverse
    = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
      && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump);

  return canonicalize_condition (jump, cond, reverse, earliest, NULL_RTX,
                                 allow_cc_mode, valid_at_insn_p);
}

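/* A minimal usage sketch, assuming JUMP is a conditional jump insn
   inside a basic block: fetch its branch condition in canonical form,
   refusing raw CC-mode compares and requiring the condition to be valid
   at JUMP itself.  The wrapper name below is illustrative only.  */

static rtx
example_branch_condition (rtx jump)
{
  rtx earliest;

  /* EARLIEST receives the earliest insn examined while locating the
     condition; it is unused in this sketch.  */
  return get_condition (jump, &earliest, 0, 1);
}
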
/* Initialize the table NUM_SIGN_BIT_COPIES_IN_REP based on
   TARGET_MODE_REP_EXTENDED.

   Note that we assume that the property of
   TARGET_MODE_REP_EXTENDED(B, C) is sticky to the integral modes
   narrower than mode B.  I.e., if A is a mode narrower than B then in
   order to be able to operate on it in mode B, mode A needs to
   satisfy the requirements set by the representation of mode B.  */

static void
init_num_sign_bit_copies_in_rep (void)
{
  enum machine_mode mode, in_mode;

  /* The inner loop below terminates with MODE equal to IN_MODE, so
     stepping the outer loop by GET_MODE_WIDER_MODE (mode) advances
     IN_MODE to the next wider integral mode.  */
  for (in_mode = GET_CLASS_NARROWEST_MODE (MODE_INT); in_mode != VOIDmode;
       in_mode = GET_MODE_WIDER_MODE (mode))
    for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != in_mode;
         mode = GET_MODE_WIDER_MODE (mode))
      {
        enum machine_mode i;

        /* Currently, it is assumed that TARGET_MODE_REP_EXTENDED
           extends to the next widest mode.  */
        gcc_assert (targetm.mode_rep_extended (mode, in_mode) == UNKNOWN
                    || GET_MODE_WIDER_MODE (mode) == in_mode);

        /* We are in in_mode.  Count how many bits outside of mode
           have to be copies of the sign-bit.  */
        for (i = mode; i != in_mode; i = GET_MODE_WIDER_MODE (i))
          {
            enum machine_mode wider = GET_MODE_WIDER_MODE (i);

            if (targetm.mode_rep_extended (i, wider) == SIGN_EXTEND
                /* We can only check sign-bit copies starting from the
                   top-bit.  In order to be able to check the bits we
                   have already seen we pretend that subsequent bits
                   have to be sign-bit copies too.  */
                || num_sign_bit_copies_in_rep [in_mode][mode])
              num_sign_bit_copies_in_rep [in_mode][mode]
                += GET_MODE_PRECISION (wider) - GET_MODE_PRECISION (i);
          }
      }
}

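/* For example, on a target whose TARGET_MODE_REP_EXTENDED hook returns
   SIGN_EXTEND for (SImode, DImode), the loops above record
   num_sign_bit_copies_in_rep[DImode][SImode] == 32: the 32 bits of a
   DImode value outside SImode must be copies of the sign bit.  */
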
/* Suppose that truncation from the machine mode of X to MODE is not a
   no-op.  See if there is anything special about X so that we can
   assume it already contains a truncated value of MODE.  */

bool
truncated_to_mode (enum machine_mode mode, const_rtx x)
{
  /* This register has already been used in MODE without explicit
     truncation.  */
  if (REG_P (x) && rtl_hooks.reg_truncated_to_mode (mode, x))
    return true;

  /* See if we already satisfy the requirements of MODE.  If yes, we
     can just switch to MODE.  */
  if (num_sign_bit_copies_in_rep[GET_MODE (x)][mode]
      && (num_sign_bit_copies (x, GET_MODE (x))
          >= num_sign_bit_copies_in_rep[GET_MODE (x)][mode] + 1))
    return true;

  return false;
}

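/* Continuing the example above: with
   num_sign_bit_copies_in_rep[DImode][SImode] == 32, a DImode X is
   considered already truncated to SImode when at least 33 of its
   high-order bits are known to be copies of the sign bit.  */
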
/* Initialize non_rtx_starting_operands, which is used to speed up
   for_each_rtx.  */
void
init_rtlanal (void)
{
  int i;
  for (i = 0; i < NUM_RTX_CODE; i++)
    {
      const char *format = GET_RTX_FORMAT (i);
      const char *first = strpbrk (format, "eEV");
      non_rtx_starting_operands[i] = first ? first - format : -1;
    }

  init_num_sign_bit_copies_in_rep ();
}

/* Check whether this is a constant pool constant.  */
bool
constant_pool_constant_p (rtx x)
{
  x = avoid_constant_pool_reference (x);
  return GET_CODE (x) == CONST_DOUBLE;
}

/* If M is a bitmask that selects a field of low-order bits within an item but
   not the entire word, return the length of the field.  Return -1 otherwise.
   M is used in machine mode MODE.  */

int
low_bitmask_len (enum machine_mode mode, unsigned HOST_WIDE_INT m)
{
  if (mode != VOIDmode)
    {
      if (GET_MODE_PRECISION (mode) > HOST_BITS_PER_WIDE_INT)
        return -1;
      m &= GET_MODE_MASK (mode);
    }

  return exact_log2 (m + 1);
}
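
/* As an illustration of low_bitmask_len: (SImode, 0x3f) yields 6, since
   0x3f selects the six low-order bits, while (SImode, 0xf0) yields -1,
   since 0xf0 + 1 is not a power of two.  */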
