/* Analyze RTL for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "target.h"
#include "output.h"
#include "tm_p.h"
#include "flags.h"
#include "real.h"
#include "regs.h"
#include "function.h"
#include "df.h"
#include "tree.h"

/* Forward declarations */
static void set_of_1 (rtx, const_rtx, void *);
static bool covers_regno_p (const_rtx, unsigned int);
static bool covers_regno_no_parallel_p (const_rtx, unsigned int);
static int rtx_referenced_p_1 (rtx *, void *);
static int computed_jump_p_1 (const_rtx);
static void parms_set (rtx, const_rtx, void *);

static unsigned HOST_WIDE_INT cached_nonzero_bits (const_rtx, enum machine_mode,
                                                   const_rtx, enum machine_mode,
                                                   unsigned HOST_WIDE_INT);
static unsigned HOST_WIDE_INT nonzero_bits1 (const_rtx, enum machine_mode,
                                             const_rtx, enum machine_mode,
                                             unsigned HOST_WIDE_INT);
static unsigned int cached_num_sign_bit_copies (const_rtx, enum machine_mode, const_rtx,
                                                enum machine_mode,
                                                unsigned int);
static unsigned int num_sign_bit_copies1 (const_rtx, enum machine_mode, const_rtx,
                                          enum machine_mode, unsigned int);

/* Offset of the first 'e', 'E' or 'V' operand for each rtx code, or
   -1 if a code has no such operand.  */
static int non_rtx_starting_operands[NUM_RTX_CODE];

/* Bit flags that specify the machine subtype we are compiling for.
   Bits are tested using macros TARGET_... defined in the tm.h file
   and set by `-m...' switches.  Must be defined in rtlanal.c.  */

int target_flags;

/* Truncation narrows the mode from SOURCE mode to DESTINATION mode.
   If TARGET_MODE_REP_EXTENDED (DESTINATION, DESTINATION_REP) is
   SIGN_EXTEND then while narrowing we also have to enforce the
   representation and sign-extend the value to mode DESTINATION_REP.

   If the value is already sign-extended to DESTINATION_REP mode we
   can just switch to DESTINATION mode on it.  For each pair of
   integral modes SOURCE and DESTINATION, when truncating from SOURCE
   to DESTINATION, NUM_SIGN_BIT_COPIES_IN_REP[SOURCE][DESTINATION]
   contains the number of high-order bits in SOURCE that have to be
   copies of the sign-bit so that we can do this mode-switch to
   DESTINATION.  */

static unsigned int
num_sign_bit_copies_in_rep[MAX_MODE_INT + 1][MAX_MODE_INT + 1];

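/* Worked example (illustrative, not from the original sources): on a
   hypothetical target with 16-bit HImode, 32-bit SImode and
   TARGET_MODE_REP_EXTENDED (HImode, SImode) == SIGN_EXTEND, truncating an
   SImode value to HImode is a pure mode switch only when the value is
   already sign-extended from 16 bits, i.e. when its upper 16 bits (bits
   16..31) are all copies of the sign bit (bit 15).  On such a target the
   entry num_sign_bit_copies_in_rep[SImode][HImode] would hold that
   count, 16.  */
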
/* Return 1 if the value of X is unstable
   (would be different at a different point in the program).
   The frame pointer, arg pointer, etc. are considered stable
   (within one function) and so is anything marked `unchanging'.  */

int
rtx_unstable_p (const_rtx x)
{
  const RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  switch (code)
    {
    case MEM:
      return !MEM_READONLY_P (x) || rtx_unstable_p (XEXP (x, 0));

    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case REG:
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return 0;
#ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
      /* ??? When call-clobbered, the value is stable modulo the restore
         that must happen after a call.  This currently screws up local-alloc
         into believing that the restore is not needed.  */
      if (x == pic_offset_table_rtx)
        return 0;
#endif
      return 1;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

      /* Fall through.  */

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_unstable_p (XEXP (x, i)))
          return 1;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_unstable_p (XVECEXP (x, i, j)))
            return 1;
      }

  return 0;
}
 
157
/* Return 1 if X has a value that can vary even between two
158
   executions of the program.  0 means X can be compared reliably
159
   against certain constants or near-constants.
160
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
161
   zero, we are slightly more conservative.
162
   The frame pointer and the arg pointer are considered constant.  */
163
 
164
bool
165
rtx_varies_p (const_rtx x, bool for_alias)
166
{
167
  RTX_CODE code;
168
  int i;
169
  const char *fmt;
170
 
171
  if (!x)
172
    return 0;
173
 
174
  code = GET_CODE (x);
175
  switch (code)
176
    {
177
    case MEM:
178
      return !MEM_READONLY_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);
179
 
180
    case CONST:
181
    case CONST_INT:
182
    case CONST_DOUBLE:
183
    case CONST_FIXED:
184
    case CONST_VECTOR:
185
    case SYMBOL_REF:
186
    case LABEL_REF:
187
      return 0;
188
 
189
    case REG:
190
      /* Note that we have to test for the actual rtx used for the frame
191
         and arg pointers and not just the register number in case we have
192
         eliminated the frame and/or arg pointer and are using it
193
         for pseudos.  */
194
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
195
          /* The arg pointer varies if it is not a fixed register.  */
196
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
197
        return 0;
198
      if (x == pic_offset_table_rtx
199
#ifdef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
200
          /* ??? When call-clobbered, the value is stable modulo the restore
201
             that must happen after a call.  This currently screws up
202
             local-alloc into believing that the restore is not needed, so we
203
             must return 0 only if we are called from alias analysis.  */
204
          && for_alias
205
#endif
206
          )
207
        return 0;
208
      return 1;
209
 
210
    case LO_SUM:
211
      /* The operand 0 of a LO_SUM is considered constant
212
         (in fact it is related specifically to operand 1)
213
         during alias analysis.  */
214
      return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
215
             || rtx_varies_p (XEXP (x, 1), for_alias);
216
 
217
    case ASM_OPERANDS:
218
      if (MEM_VOLATILE_P (x))
219
        return 1;
220
 
221
      /* Fall through.  */
222
 
223
    default:
224
      break;
225
    }
226
 
227
  fmt = GET_RTX_FORMAT (code);
228
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
229
    if (fmt[i] == 'e')
230
      {
231
        if (rtx_varies_p (XEXP (x, i), for_alias))
232
          return 1;
233
      }
234
    else if (fmt[i] == 'E')
235
      {
236
        int j;
237
        for (j = 0; j < XVECLEN (x, i); j++)
238
          if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
239
            return 1;
240
      }
241
 
242
  return 0;
243
}
244
 
245
/* Return nonzero if the use of X as an address in a MEM can cause a trap.
246
   MODE is the mode of the MEM (not that of X) and UNALIGNED_MEMS controls
247
   whether nonzero is returned for unaligned memory accesses on strict
248
   alignment machines.  */
249
 
250
static int
251
rtx_addr_can_trap_p_1 (const_rtx x, HOST_WIDE_INT offset, HOST_WIDE_INT size,
252
                       enum machine_mode mode, bool unaligned_mems)
253
{
254
  enum rtx_code code = GET_CODE (x);
255
 
256
  if (STRICT_ALIGNMENT
257
      && unaligned_mems
258
      && GET_MODE_SIZE (mode) != 0)
259
    {
260
      HOST_WIDE_INT actual_offset = offset;
261
#ifdef SPARC_STACK_BOUNDARY_HACK
262
      /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
263
             the real alignment of %sp.  However, when it does this, the
264
             alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY.  */
265
      if (SPARC_STACK_BOUNDARY_HACK
266
          && (x == stack_pointer_rtx || x == hard_frame_pointer_rtx))
267
        actual_offset -= STACK_POINTER_OFFSET;
268
#endif
269
 
270
      if (actual_offset % GET_MODE_SIZE (mode) != 0)
271
        return 1;
272
    }
273
 
274
  switch (code)
275
    {
276
    case SYMBOL_REF:
277
      if (SYMBOL_REF_WEAK (x))
278
        return 1;
279
      if (!CONSTANT_POOL_ADDRESS_P (x))
280
        {
281
          tree decl;
282
          HOST_WIDE_INT decl_size;
283
 
284
          if (offset < 0)
285
            return 1;
286
          if (size == 0)
287
            size = GET_MODE_SIZE (mode);
288
          if (size == 0)
289
            return offset != 0;
290
 
291
          /* If the size of the access or of the symbol is unknown,
292
             assume the worst.  */
293
          decl = SYMBOL_REF_DECL (x);
294
 
295
          /* Else check that the access is in bounds.  TODO: restructure
296
             expr_size/tree_expr_size/int_expr_size and just use the latter.  */
297
          if (!decl)
298
            decl_size = -1;
299
          else if (DECL_P (decl) && DECL_SIZE_UNIT (decl))
300
            decl_size = (host_integerp (DECL_SIZE_UNIT (decl), 0)
301
                         ? tree_low_cst (DECL_SIZE_UNIT (decl), 0)
302
                         : -1);
303
          else if (TREE_CODE (decl) == STRING_CST)
304
            decl_size = TREE_STRING_LENGTH (decl);
305
          else if (TYPE_SIZE_UNIT (TREE_TYPE (decl)))
306
            decl_size = int_size_in_bytes (TREE_TYPE (decl));
307
          else
308
            decl_size = -1;
309
 
310
          return (decl_size <= 0 ? offset != 0 : offset + size > decl_size);
311
        }
312
 
313
      return 0;
314
 
315
    case LABEL_REF:
316
      return 0;
317
 
318
    case REG:
319
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
320
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
321
          || x == stack_pointer_rtx
322
          /* The arg pointer varies if it is not a fixed register.  */
323
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
324
        return 0;
325
      /* All of the virtual frame registers are stack references.  */
326
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
327
          && REGNO (x) <= LAST_VIRTUAL_REGISTER)
328
        return 0;
329
      return 1;
330
 
331
    case CONST:
332
      return rtx_addr_can_trap_p_1 (XEXP (x, 0), offset, size,
333
                                    mode, unaligned_mems);
334
 
335
    case PLUS:
336
      /* An address is assumed not to trap if:
337
         - it is the pic register plus a constant.  */
338
      if (XEXP (x, 0) == pic_offset_table_rtx && CONSTANT_P (XEXP (x, 1)))
339
        return 0;
340
 
341
      /* - or it is an address that can't trap plus a constant integer,
342
           with the proper remainder modulo the mode size if we are
343
           considering unaligned memory references.  */
344
      if (CONST_INT_P (XEXP (x, 1))
345
          && !rtx_addr_can_trap_p_1 (XEXP (x, 0), offset + INTVAL (XEXP (x, 1)),
346
                                     size, mode, unaligned_mems))
347
        return 0;
348
 
349
      return 1;
350
 
351
    case LO_SUM:
352
    case PRE_MODIFY:
353
      return rtx_addr_can_trap_p_1 (XEXP (x, 1), offset, size,
354
                                    mode, unaligned_mems);
355
 
356
    case PRE_DEC:
357
    case PRE_INC:
358
    case POST_DEC:
359
    case POST_INC:
360
    case POST_MODIFY:
361
      return rtx_addr_can_trap_p_1 (XEXP (x, 0), offset, size,
362
                                    mode, unaligned_mems);
363
 
364
    default:
365
      break;
366
    }
367
 
368
  /* If it isn't one of the cases above, it can cause a trap.  */
369
  return 1;
370
}
371
 
372
/* Return nonzero if the use of X as an address in a MEM can cause a trap.  */
373
 
374
int
375
rtx_addr_can_trap_p (const_rtx x)
376
{
377
  return rtx_addr_can_trap_p_1 (x, 0, 0, VOIDmode, false);
378
}
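
/* Illustrative usage sketch (not part of the original file; the helper
   name and the address construction are examples only): deciding whether
   a stack-slot address may trap before speculating a load from it.  */

static bool
example_stack_slot_address_can_trap_p (HOST_WIDE_INT offset)
{
  /* (plus (reg sp) (const_int OFFSET)), a typical stack-slot address.  */
  rtx addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));

  /* Stack-pointer-based addresses are treated as non-trapping above, so
     this returns false for any constant offset.  */
  return rtx_addr_can_trap_p (addr) != 0;
}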
379
 
380
/* Return true if X is an address that is known to not be zero.  */
381
 
382
bool
383
nonzero_address_p (const_rtx x)
384
{
385
  const enum rtx_code code = GET_CODE (x);
386
 
387
  switch (code)
388
    {
389
    case SYMBOL_REF:
390
      return !SYMBOL_REF_WEAK (x);
391
 
392
    case LABEL_REF:
393
      return true;
394
 
395
    case REG:
396
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
397
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
398
          || x == stack_pointer_rtx
399
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
400
        return true;
401
      /* All of the virtual frame registers are stack references.  */
402
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
403
          && REGNO (x) <= LAST_VIRTUAL_REGISTER)
404
        return true;
405
      return false;
406
 
407
    case CONST:
408
      return nonzero_address_p (XEXP (x, 0));
409
 
410
    case PLUS:
411
      if (CONST_INT_P (XEXP (x, 1)))
412
        return nonzero_address_p (XEXP (x, 0));
413
      /* Handle PIC references.  */
414
      else if (XEXP (x, 0) == pic_offset_table_rtx
415
               && CONSTANT_P (XEXP (x, 1)))
416
        return true;
417
      return false;
418
 
419
    case PRE_MODIFY:
420
      /* Similar to the above; allow positive offsets.  Further, since
421
         auto-inc is only allowed in memories, the register must be a
422
         pointer.  */
423
      if (CONST_INT_P (XEXP (x, 1))
424
          && INTVAL (XEXP (x, 1)) > 0)
425
        return true;
426
      return nonzero_address_p (XEXP (x, 0));
427
 
428
    case PRE_INC:
429
      /* Similarly.  Further, the offset is always positive.  */
430
      return true;
431
 
432
    case PRE_DEC:
433
    case POST_DEC:
434
    case POST_INC:
435
    case POST_MODIFY:
436
      return nonzero_address_p (XEXP (x, 0));
437
 
438
    case LO_SUM:
439
      return nonzero_address_p (XEXP (x, 1));
440
 
441
    default:
442
      break;
443
    }
444
 
445
  /* If it isn't one of the cases above, it might be zero.  */
446
  return false;
447
}
448
 
449
/* Return 1 if X refers to a memory location whose address
450
   cannot be compared reliably with constant addresses,
451
   or if X refers to a BLKmode memory object.
452
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
453
   zero, we are slightly more conservative.  */
454
 
455
bool
456
rtx_addr_varies_p (const_rtx x, bool for_alias)
457
{
458
  enum rtx_code code;
459
  int i;
460
  const char *fmt;
461
 
462
  if (x == 0)
463
    return 0;
464
 
465
  code = GET_CODE (x);
466
  if (code == MEM)
467
    return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);
468
 
469
  fmt = GET_RTX_FORMAT (code);
470
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
471
    if (fmt[i] == 'e')
472
      {
473
        if (rtx_addr_varies_p (XEXP (x, i), for_alias))
474
          return 1;
475
      }
476
    else if (fmt[i] == 'E')
477
      {
478
        int j;
479
        for (j = 0; j < XVECLEN (x, i); j++)
480
          if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
481
            return 1;
482
      }
483
  return 0;
484
}
485
 
486
/* Return the value of the integer term in X, if one is apparent;
487
   otherwise return 0.
488
   Only obvious integer terms are detected.
489
   This is used in cse.c with the `related_value' field.  */
490
 
491
HOST_WIDE_INT
492
get_integer_term (const_rtx x)
493
{
494
  if (GET_CODE (x) == CONST)
495
    x = XEXP (x, 0);
496
 
497
  if (GET_CODE (x) == MINUS
498
      && CONST_INT_P (XEXP (x, 1)))
499
    return - INTVAL (XEXP (x, 1));
500
  if (GET_CODE (x) == PLUS
501
      && CONST_INT_P (XEXP (x, 1)))
502
    return INTVAL (XEXP (x, 1));
503
  return 0;
504
}
505
 
506
/* If X is a constant, return the value sans apparent integer term;
507
   otherwise return 0.
508
   Only obvious integer terms are detected.  */
509
 
510
rtx
511
get_related_value (const_rtx x)
512
{
513
  if (GET_CODE (x) != CONST)
514
    return 0;
515
  x = XEXP (x, 0);
516
  if (GET_CODE (x) == PLUS
517
      && CONST_INT_P (XEXP (x, 1)))
518
    return XEXP (x, 0);
519
  else if (GET_CODE (x) == MINUS
520
           && CONST_INT_P (XEXP (x, 1)))
521
    return XEXP (x, 0);
522
  return 0;
523
}
524
 
525
/* Return true if SYMBOL is a SYMBOL_REF and OFFSET + SYMBOL points
526
   to somewhere in the same object or object_block as SYMBOL.  */
527
 
528
bool
529
offset_within_block_p (const_rtx symbol, HOST_WIDE_INT offset)
530
{
531
  tree decl;
532
 
533
  if (GET_CODE (symbol) != SYMBOL_REF)
534
    return false;
535
 
536
  if (offset == 0)
537
    return true;
538
 
539
  if (offset > 0)
540
    {
541
      if (CONSTANT_POOL_ADDRESS_P (symbol)
542
          && offset < (int) GET_MODE_SIZE (get_pool_mode (symbol)))
543
        return true;
544
 
545
      decl = SYMBOL_REF_DECL (symbol);
546
      if (decl && offset < int_size_in_bytes (TREE_TYPE (decl)))
547
        return true;
548
    }
549
 
550
  if (SYMBOL_REF_HAS_BLOCK_INFO_P (symbol)
551
      && SYMBOL_REF_BLOCK (symbol)
552
      && SYMBOL_REF_BLOCK_OFFSET (symbol) >= 0
553
      && ((unsigned HOST_WIDE_INT) offset + SYMBOL_REF_BLOCK_OFFSET (symbol)
554
          < (unsigned HOST_WIDE_INT) SYMBOL_REF_BLOCK (symbol)->size))
555
    return true;
556
 
557
  return false;
558
}
559
 
560
/* Split X into a base and a constant offset, storing them in *BASE_OUT
561
   and *OFFSET_OUT respectively.  */
562
 
563
void
564
split_const (rtx x, rtx *base_out, rtx *offset_out)
565
{
566
  if (GET_CODE (x) == CONST)
567
    {
568
      x = XEXP (x, 0);
569
      if (GET_CODE (x) == PLUS && CONST_INT_P (XEXP (x, 1)))
570
        {
571
          *base_out = XEXP (x, 0);
572
          *offset_out = XEXP (x, 1);
573
          return;
574
        }
575
    }
576
  *base_out = x;
577
  *offset_out = const0_rtx;
578
}
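
/* Illustrative usage sketch (not from the original file; the helper name
   is hypothetical): split_const turns (const (plus (symbol_ref "x")
   (const_int 12))) into the SYMBOL_REF base and (const_int 12), and leaves
   anything else as the base with a zero offset, so the returned offset is
   always a CONST_INT.  */

static HOST_WIDE_INT
example_constant_offset_of (rtx x)
{
  rtx base, offset;

  split_const (x, &base, &offset);
  return INTVAL (offset);
}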
579
 
580
/* Return the number of places FIND appears within X.  If COUNT_DEST is
581
   zero, we do not count occurrences inside the destination of a SET.  */
582
 
583
int
584
count_occurrences (const_rtx x, const_rtx find, int count_dest)
585
{
586
  int i, j;
587
  enum rtx_code code;
588
  const char *format_ptr;
589
  int count;
590
 
591
  if (x == find)
592
    return 1;
593
 
594
  code = GET_CODE (x);
595
 
596
  switch (code)
597
    {
598
    case REG:
599
    case CONST_INT:
600
    case CONST_DOUBLE:
601
    case CONST_FIXED:
602
    case CONST_VECTOR:
603
    case SYMBOL_REF:
604
    case CODE_LABEL:
605
    case PC:
606
    case CC0:
607
      return 0;
608
 
609
    case EXPR_LIST:
610
      count = count_occurrences (XEXP (x, 0), find, count_dest);
611
      if (XEXP (x, 1))
612
        count += count_occurrences (XEXP (x, 1), find, count_dest);
613
      return count;
614
 
615
    case MEM:
616
      if (MEM_P (find) && rtx_equal_p (x, find))
617
        return 1;
618
      break;
619
 
620
    case SET:
621
      if (SET_DEST (x) == find && ! count_dest)
622
        return count_occurrences (SET_SRC (x), find, count_dest);
623
      break;
624
 
625
    default:
626
      break;
627
    }
628
 
629
  format_ptr = GET_RTX_FORMAT (code);
630
  count = 0;
631
 
632
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
633
    {
634
      switch (*format_ptr++)
635
        {
636
        case 'e':
637
          count += count_occurrences (XEXP (x, i), find, count_dest);
638
          break;
639
 
640
        case 'E':
641
          for (j = 0; j < XVECLEN (x, i); j++)
642
            count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
643
          break;
644
        }
645
    }
646
  return count;
647
}
648
 
649
 
650
/* Nonzero if register REG appears somewhere within IN.
651
   Also works if REG is not a register; in this case it checks
652
   for a subexpression of IN that is Lisp "equal" to REG.  */
653
 
654
int
655
reg_mentioned_p (const_rtx reg, const_rtx in)
656
{
657
  const char *fmt;
658
  int i;
659
  enum rtx_code code;
660
 
661
  if (in == 0)
662
    return 0;
663
 
664
  if (reg == in)
665
    return 1;
666
 
667
  if (GET_CODE (in) == LABEL_REF)
668
    return reg == XEXP (in, 0);
669
 
670
  code = GET_CODE (in);
671
 
672
  switch (code)
673
    {
674
      /* Compare registers by number.  */
675
    case REG:
676
      return REG_P (reg) && REGNO (in) == REGNO (reg);
677
 
678
      /* These codes have no constituent expressions
679
         and are unique.  */
680
    case SCRATCH:
681
    case CC0:
682
    case PC:
683
      return 0;
684
 
685
    case CONST_INT:
686
    case CONST_VECTOR:
687
    case CONST_DOUBLE:
688
    case CONST_FIXED:
689
      /* These are kept unique for a given value.  */
690
      return 0;
691
 
692
    default:
693
      break;
694
    }
695
 
696
  if (GET_CODE (reg) == code && rtx_equal_p (reg, in))
697
    return 1;
698
 
699
  fmt = GET_RTX_FORMAT (code);
700
 
701
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
702
    {
703
      if (fmt[i] == 'E')
704
        {
705
          int j;
706
          for (j = XVECLEN (in, i) - 1; j >= 0; j--)
707
            if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
708
              return 1;
709
        }
710
      else if (fmt[i] == 'e'
711
               && reg_mentioned_p (reg, XEXP (in, i)))
712
        return 1;
713
    }
714
  return 0;
715
}
716
 
717
/* Return 1 if in between BEG and END, exclusive of BEG and END, there is
718
   no CODE_LABEL insn.  */
719
 
720
int
721
no_labels_between_p (const_rtx beg, const_rtx end)
722
{
723
  rtx p;
724
  if (beg == end)
725
    return 0;
726
  for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
727
    if (LABEL_P (p))
728
      return 0;
729
  return 1;
730
}
731
 
732
/* Nonzero if register REG is used in an insn between
733
   FROM_INSN and TO_INSN (exclusive of those two).  */
734
 
735
int
736
reg_used_between_p (const_rtx reg, const_rtx from_insn, const_rtx to_insn)
737
{
738
  rtx insn;
739
 
740
  if (from_insn == to_insn)
741
    return 0;
742
 
743
  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
744
    if (NONDEBUG_INSN_P (insn)
745
        && (reg_overlap_mentioned_p (reg, PATTERN (insn))
746
           || (CALL_P (insn) && find_reg_fusage (insn, USE, reg))))
747
      return 1;
748
  return 0;
749
}
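
/* Illustrative usage sketch (not part of the original file; the helper is
   hypothetical): a typical building block of "is it safe to move this
   computation" checks, combining reg_used_between_p with
   reg_set_between_p (defined further below).  */

static bool
example_reg_untouched_between_p (rtx reg, rtx from_insn, rtx to_insn)
{
  return !reg_used_between_p (reg, from_insn, to_insn)
         && !reg_set_between_p (reg, from_insn, to_insn);
}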
750
 
751
/* Nonzero if the old value of X, a register, is referenced in BODY.  If X
752
   is entirely replaced by a new value and the only use is as a SET_DEST,
753
   we do not consider it a reference.  */
754
 
755
int
756
reg_referenced_p (const_rtx x, const_rtx body)
757
{
758
  int i;
759
 
760
  switch (GET_CODE (body))
761
    {
762
    case SET:
763
      if (reg_overlap_mentioned_p (x, SET_SRC (body)))
764
        return 1;
765
 
766
      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
767
         of a REG that occupies all of the REG, the insn references X if
768
         it is mentioned in the destination.  */
769
      if (GET_CODE (SET_DEST (body)) != CC0
770
          && GET_CODE (SET_DEST (body)) != PC
771
          && !REG_P (SET_DEST (body))
772
          && ! (GET_CODE (SET_DEST (body)) == SUBREG
773
                && REG_P (SUBREG_REG (SET_DEST (body)))
774
                && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
775
                      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
776
                    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
777
                         + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
778
          && reg_overlap_mentioned_p (x, SET_DEST (body)))
779
        return 1;
780
      return 0;
781
 
782
    case ASM_OPERANDS:
783
      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
784
        if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))
785
          return 1;
786
      return 0;
787
 
788
    case CALL:
789
    case USE:
790
    case IF_THEN_ELSE:
791
      return reg_overlap_mentioned_p (x, body);
792
 
793
    case TRAP_IF:
794
      return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));
795
 
796
    case PREFETCH:
797
      return reg_overlap_mentioned_p (x, XEXP (body, 0));
798
 
799
    case UNSPEC:
800
    case UNSPEC_VOLATILE:
801
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
802
        if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))
803
          return 1;
804
      return 0;
805
 
806
    case PARALLEL:
807
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
808
        if (reg_referenced_p (x, XVECEXP (body, 0, i)))
809
          return 1;
810
      return 0;
811
 
812
    case CLOBBER:
813
      if (MEM_P (XEXP (body, 0)))
814
        if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
815
          return 1;
816
      return 0;
817
 
818
    case COND_EXEC:
819
      if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
820
        return 1;
821
      return reg_referenced_p (x, COND_EXEC_CODE (body));
822
 
823
    default:
824
      return 0;
825
    }
826
}
827
 
828
/* Nonzero if register REG is set or clobbered in an insn between
829
   FROM_INSN and TO_INSN (exclusive of those two).  */
830
 
831
int
832
reg_set_between_p (const_rtx reg, const_rtx from_insn, const_rtx to_insn)
833
{
834
  const_rtx insn;
835
 
836
  if (from_insn == to_insn)
837
    return 0;
838
 
839
  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
840
    if (INSN_P (insn) && reg_set_p (reg, insn))
841
      return 1;
842
  return 0;
843
}
844
 
845
/* Internals of reg_set_between_p.  */
846
int
847
reg_set_p (const_rtx reg, const_rtx insn)
848
{
849
  /* We can be passed an insn or part of one.  If we are passed an insn,
850
     check if a side-effect of the insn clobbers REG.  */
851
  if (INSN_P (insn)
852
      && (FIND_REG_INC_NOTE (insn, reg)
853
          || (CALL_P (insn)
854
              && ((REG_P (reg)
855
                   && REGNO (reg) < FIRST_PSEUDO_REGISTER
856
                   && overlaps_hard_reg_set_p (regs_invalidated_by_call,
857
                                               GET_MODE (reg), REGNO (reg)))
858
                  || MEM_P (reg)
859
                  || find_reg_fusage (insn, CLOBBER, reg)))))
860
    return 1;
861
 
862
  return set_of (reg, insn) != NULL_RTX;
863
}
864
 
865
/* Similar to reg_set_between_p, but check all registers in X.  Return 0
866
   only if none of them are modified between START and END.  Return 1 if
867
   X contains a MEM; this routine does use memory aliasing.  */
868
 
869
int
870
modified_between_p (const_rtx x, const_rtx start, const_rtx end)
871
{
872
  const enum rtx_code code = GET_CODE (x);
873
  const char *fmt;
874
  int i, j;
875
  rtx insn;
876
 
877
  if (start == end)
878
    return 0;
879
 
880
  switch (code)
881
    {
882
    case CONST_INT:
883
    case CONST_DOUBLE:
884
    case CONST_FIXED:
885
    case CONST_VECTOR:
886
    case CONST:
887
    case SYMBOL_REF:
888
    case LABEL_REF:
889
      return 0;
890
 
891
    case PC:
892
    case CC0:
893
      return 1;
894
 
895
    case MEM:
896
      if (modified_between_p (XEXP (x, 0), start, end))
897
        return 1;
898
      if (MEM_READONLY_P (x))
899
        return 0;
900
      for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
901
        if (memory_modified_in_insn_p (x, insn))
902
          return 1;
903
      return 0;
904
      break;
905
 
906
    case REG:
907
      return reg_set_between_p (x, start, end);
908
 
909
    default:
910
      break;
911
    }
912
 
913
  fmt = GET_RTX_FORMAT (code);
914
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
915
    {
916
      if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
917
        return 1;
918
 
919
      else if (fmt[i] == 'E')
920
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
921
          if (modified_between_p (XVECEXP (x, i, j), start, end))
922
            return 1;
923
    }
924
 
925
  return 0;
926
}
927
 
928
/* Similar to reg_set_p, but check all registers in X.  Return 0 only if none
929
   of them are modified in INSN.  Return 1 if X contains a MEM; this routine
930
   does use memory aliasing.  */
931
 
932
int
933
modified_in_p (const_rtx x, const_rtx insn)
934
{
935
  const enum rtx_code code = GET_CODE (x);
936
  const char *fmt;
937
  int i, j;
938
 
939
  switch (code)
940
    {
941
    case CONST_INT:
942
    case CONST_DOUBLE:
943
    case CONST_FIXED:
944
    case CONST_VECTOR:
945
    case CONST:
946
    case SYMBOL_REF:
947
    case LABEL_REF:
948
      return 0;
949
 
950
    case PC:
951
    case CC0:
952
      return 1;
953
 
954
    case MEM:
955
      if (modified_in_p (XEXP (x, 0), insn))
956
        return 1;
957
      if (MEM_READONLY_P (x))
958
        return 0;
959
      if (memory_modified_in_insn_p (x, insn))
960
        return 1;
961
      return 0;
962
      break;
963
 
964
    case REG:
965
      return reg_set_p (x, insn);
966
 
967
    default:
968
      break;
969
    }
970
 
971
  fmt = GET_RTX_FORMAT (code);
972
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
973
    {
974
      if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
975
        return 1;
976
 
977
      else if (fmt[i] == 'E')
978
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
979
          if (modified_in_p (XVECEXP (x, i, j), insn))
980
            return 1;
981
    }
982
 
983
  return 0;
984
}
985
 
/* Helper function for set_of.  */
struct set_of_data
  {
    const_rtx found;
    const_rtx pat;
  };

static void
set_of_1 (rtx x, const_rtx pat, void *data1)
{
  struct set_of_data *const data = (struct set_of_data *) (data1);
  if (rtx_equal_p (x, data->pat)
      || (!MEM_P (x) && reg_overlap_mentioned_p (data->pat, x)))
    data->found = pat;
}

/* Given an INSN, return a SET or CLOBBER expression that does modify PAT
   (either directly or via STRICT_LOW_PART and similar modifiers).  */
const_rtx
set_of (const_rtx pat, const_rtx insn)
{
  struct set_of_data data;
  data.found = NULL_RTX;
  data.pat = pat;
  note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
  return data.found;
}

/* Given an INSN, return a SET expression if this insn has only a single SET.
   It may also have CLOBBERs, USEs, or SETs whose output
   will not be used, which we ignore.  */

rtx
single_set_2 (const_rtx insn, const_rtx pat)
{
  rtx set = NULL;
  int set_verified = 1;
  int i;

  if (GET_CODE (pat) == PARALLEL)
    {
      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx sub = XVECEXP (pat, 0, i);
          switch (GET_CODE (sub))
            {
            case USE:
            case CLOBBER:
              break;

            case SET:
              /* We can consider insns having multiple sets, where all
                 but one are dead, as single set insns.  In the common case
                 only a single set is present in the pattern so we want
                 to avoid checking for REG_UNUSED notes unless necessary.

                 When we reach a set the first time, we just expect it to be
                 the single set we are looking for; only when more
                 sets are found in the insn do we check them.  */
              if (!set_verified)
                {
                  if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
                      && !side_effects_p (set))
                    set = NULL;
                  else
                    set_verified = 1;
                }
              if (!set)
                set = sub, set_verified = 0;
              else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
                       || side_effects_p (sub))
                return NULL_RTX;
              break;

            default:
              return NULL_RTX;
            }
        }
    }
  return set;
}
 
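/* Illustrative usage sketch (not from the original file; the helper name is
   hypothetical): most callers go through single_set (see rtl.h), which
   returns the pattern directly for a plain SET and falls back on
   single_set_2 for PARALLELs.  */

static bool
example_simple_reg_copy_p (rtx insn)
{
  rtx set = single_set (insn);

  return (set != NULL_RTX
          && REG_P (SET_DEST (set))
          && REG_P (SET_SRC (set)));
}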
1068
/* Given an INSN, return nonzero if it has more than one SET, else return
1069
   zero.  */
1070
 
1071
int
1072
multiple_sets (const_rtx insn)
1073
{
1074
  int found;
1075
  int i;
1076
 
1077
  /* INSN must be an insn.  */
1078
  if (! INSN_P (insn))
1079
    return 0;
1080
 
1081
  /* Only a PARALLEL can have multiple SETs.  */
1082
  if (GET_CODE (PATTERN (insn)) == PARALLEL)
1083
    {
1084
      for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
1085
        if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
1086
          {
1087
            /* If we have already found a SET, then return now.  */
1088
            if (found)
1089
              return 1;
1090
            else
1091
              found = 1;
1092
          }
1093
    }
1094
 
1095
  /* Either zero or one SET.  */
1096
  return 0;
1097
}
1098
 
1099
/* Return nonzero if the destination of SET equals the source
1100
   and there are no side effects.  */
1101
 
1102
int
1103
set_noop_p (const_rtx set)
1104
{
1105
  rtx src = SET_SRC (set);
1106
  rtx dst = SET_DEST (set);
1107
 
1108
  if (dst == pc_rtx && src == pc_rtx)
1109
    return 1;
1110
 
1111
  if (MEM_P (dst) && MEM_P (src))
1112
    return rtx_equal_p (dst, src) && !side_effects_p (dst);
1113
 
1114
  if (GET_CODE (dst) == ZERO_EXTRACT)
1115
    return rtx_equal_p (XEXP (dst, 0), src)
1116
           && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx
1117
           && !side_effects_p (src);
1118
 
1119
  if (GET_CODE (dst) == STRICT_LOW_PART)
1120
    dst = XEXP (dst, 0);
1121
 
1122
  if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
1123
    {
1124
      if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))
1125
        return 0;
1126
      src = SUBREG_REG (src);
1127
      dst = SUBREG_REG (dst);
1128
    }
1129
 
1130
  return (REG_P (src) && REG_P (dst)
1131
          && REGNO (src) == REGNO (dst));
1132
}
1133
 
1134
/* Return nonzero if an insn consists only of SETs, each of which only sets a
1135
   value to itself.  */
1136
 
1137
int
1138
noop_move_p (const_rtx insn)
1139
{
1140
  rtx pat = PATTERN (insn);
1141
 
1142
  if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)
1143
    return 1;
1144
 
1145
  /* Insns carrying these notes are useful later on.  */
1146
  if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
1147
    return 0;
1148
 
1149
  if (GET_CODE (pat) == SET && set_noop_p (pat))
1150
    return 1;
1151
 
1152
  if (GET_CODE (pat) == PARALLEL)
1153
    {
1154
      int i;
1155
      /* If nothing but SETs of registers to themselves,
1156
         this insn can also be deleted.  */
1157
      for (i = 0; i < XVECLEN (pat, 0); i++)
1158
        {
1159
          rtx tem = XVECEXP (pat, 0, i);
1160
 
1161
          if (GET_CODE (tem) == USE
1162
              || GET_CODE (tem) == CLOBBER)
1163
            continue;
1164
 
1165
          if (GET_CODE (tem) != SET || ! set_noop_p (tem))
1166
            return 0;
1167
        }
1168
 
1169
      return 1;
1170
    }
1171
  return 0;
1172
}
1173
 
1174
 
1175
/* Return the last thing that X was assigned from before *PINSN.  If VALID_TO
1176
   is not NULL_RTX then verify that the object is not modified up to VALID_TO.
1177
   If the object was modified, or if we hit a partial assignment to X or a
1178
   CODE_LABEL first, return X.  If we found an assignment, update *PINSN to
1179
   point to it.  ALLOW_HWREG is set to 1 if hardware registers are allowed to
1180
   be the src.  */
1181
 
1182
rtx
1183
find_last_value (rtx x, rtx *pinsn, rtx valid_to, int allow_hwreg)
1184
{
1185
  rtx p;
1186
 
1187
  for (p = PREV_INSN (*pinsn); p && !LABEL_P (p);
1188
       p = PREV_INSN (p))
1189
    if (INSN_P (p))
1190
      {
1191
        rtx set = single_set (p);
1192
        rtx note = find_reg_note (p, REG_EQUAL, NULL_RTX);
1193
 
1194
        if (set && rtx_equal_p (x, SET_DEST (set)))
1195
          {
1196
            rtx src = SET_SRC (set);
1197
 
1198
            if (note && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
1199
              src = XEXP (note, 0);
1200
 
1201
            if ((valid_to == NULL_RTX
1202
                 || ! modified_between_p (src, PREV_INSN (p), valid_to))
1203
                /* Reject hard registers because we don't usually want
1204
                   to use them; we'd rather use a pseudo.  */
1205
                && (! (REG_P (src)
1206
                      && REGNO (src) < FIRST_PSEUDO_REGISTER) || allow_hwreg))
1207
              {
1208
                *pinsn = p;
1209
                return src;
1210
              }
1211
          }
1212
 
1213
        /* If set in non-simple way, we don't have a value.  */
1214
        if (reg_set_p (x, p))
1215
          break;
1216
      }
1217
 
1218
  return x;
1219
}
1220
 
1221
/* Return nonzero if register in range [REGNO, ENDREGNO)
1222
   appears either explicitly or implicitly in X
1223
   other than being stored into.
1224
 
1225
   References contained within the substructure at LOC do not count.
1226
   LOC may be zero, meaning don't ignore anything.  */
1227
 
1228
int
1229
refers_to_regno_p (unsigned int regno, unsigned int endregno, const_rtx x,
1230
                   rtx *loc)
1231
{
1232
  int i;
1233
  unsigned int x_regno;
1234
  RTX_CODE code;
1235
  const char *fmt;
1236
 
1237
 repeat:
1238
  /* The contents of a REG_NONNEG note is always zero, so we must come here
1239
     upon repeat in case the last REG_NOTE is a REG_NONNEG note.  */
1240
  if (x == 0)
1241
    return 0;
1242
 
1243
  code = GET_CODE (x);
1244
 
1245
  switch (code)
1246
    {
1247
    case REG:
1248
      x_regno = REGNO (x);
1249
 
1250
      /* If we are modifying the stack, frame, or argument pointer, it will
1251
         clobber a virtual register.  In fact, we could be more precise,
1252
         but it isn't worth it.  */
1253
      if ((x_regno == STACK_POINTER_REGNUM
1254
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1255
           || x_regno == ARG_POINTER_REGNUM
1256
#endif
1257
           || x_regno == FRAME_POINTER_REGNUM)
1258
          && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)
1259
        return 1;
1260
 
1261
      return endregno > x_regno && regno < END_REGNO (x);
1262
 
1263
    case SUBREG:
1264
      /* If this is a SUBREG of a hard reg, we can see exactly which
1265
         registers are being modified.  Otherwise, handle normally.  */
1266
      if (REG_P (SUBREG_REG (x))
1267
          && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
1268
        {
1269
          unsigned int inner_regno = subreg_regno (x);
1270
          unsigned int inner_endregno
1271
            = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
1272
                             ? subreg_nregs (x) : 1);
1273
 
1274
          return endregno > inner_regno && regno < inner_endregno;
1275
        }
1276
      break;
1277
 
1278
    case CLOBBER:
1279
    case SET:
1280
      if (&SET_DEST (x) != loc
1281
          /* Note setting a SUBREG counts as referring to the REG it is in for
1282
             a pseudo but not for hard registers since we can
1283
             treat each word individually.  */
1284
          && ((GET_CODE (SET_DEST (x)) == SUBREG
1285
               && loc != &SUBREG_REG (SET_DEST (x))
1286
               && REG_P (SUBREG_REG (SET_DEST (x)))
1287
               && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
1288
               && refers_to_regno_p (regno, endregno,
1289
                                     SUBREG_REG (SET_DEST (x)), loc))
1290
              || (!REG_P (SET_DEST (x))
1291
                  && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))
1292
        return 1;
1293
 
1294
      if (code == CLOBBER || loc == &SET_SRC (x))
1295
        return 0;
1296
      x = SET_SRC (x);
1297
      goto repeat;
1298
 
1299
    default:
1300
      break;
1301
    }
1302
 
1303
  /* X does not match, so try its subexpressions.  */
1304
 
1305
  fmt = GET_RTX_FORMAT (code);
1306
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1307
    {
1308
      if (fmt[i] == 'e' && loc != &XEXP (x, i))
1309
        {
1310
          if (i == 0)
1311
            {
1312
              x = XEXP (x, 0);
1313
              goto repeat;
1314
            }
1315
          else
1316
            if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
1317
              return 1;
1318
        }
1319
      else if (fmt[i] == 'E')
1320
        {
1321
          int j;
1322
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1323
            if (loc != &XVECEXP (x, i, j)
1324
                && refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
1325
              return 1;
1326
        }
1327
    }
1328
  return 0;
1329
}
1330
 
1331
/* Nonzero if modifying X will affect IN.  If X is a register or a SUBREG,
1332
   we check if any register number in X conflicts with the relevant register
1333
   numbers.  If X is a constant, return 0.  If X is a MEM, return 1 iff IN
1334
   contains a MEM (we don't bother checking for memory addresses that can't
1335
   conflict because we expect this to be a rare case).  */
1336
 
1337
int
1338
reg_overlap_mentioned_p (const_rtx x, const_rtx in)
1339
{
1340
  unsigned int regno, endregno;
1341
 
1342
  /* If either argument is a constant, then modifying X can not
1343
     affect IN.  Here we look at IN; we can profitably combine
1344
     CONSTANT_P (x) with the switch statement below.  */
1345
  if (CONSTANT_P (in))
1346
    return 0;
1347
 
1348
 recurse:
1349
  switch (GET_CODE (x))
1350
    {
1351
    case STRICT_LOW_PART:
1352
    case ZERO_EXTRACT:
1353
    case SIGN_EXTRACT:
1354
      /* Overly conservative.  */
1355
      x = XEXP (x, 0);
1356
      goto recurse;
1357
 
1358
    case SUBREG:
1359
      regno = REGNO (SUBREG_REG (x));
1360
      if (regno < FIRST_PSEUDO_REGISTER)
1361
        regno = subreg_regno (x);
1362
      endregno = regno + (regno < FIRST_PSEUDO_REGISTER
1363
                          ? subreg_nregs (x) : 1);
1364
      goto do_reg;
1365
 
1366
    case REG:
1367
      regno = REGNO (x);
1368
      endregno = END_REGNO (x);
1369
    do_reg:
1370
      return refers_to_regno_p (regno, endregno, in, (rtx*) 0);
1371
 
1372
    case MEM:
1373
      {
1374
        const char *fmt;
1375
        int i;
1376
 
1377
        if (MEM_P (in))
1378
          return 1;
1379
 
1380
        fmt = GET_RTX_FORMAT (GET_CODE (in));
1381
        for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
1382
          if (fmt[i] == 'e')
1383
            {
1384
              if (reg_overlap_mentioned_p (x, XEXP (in, i)))
1385
                return 1;
1386
            }
1387
          else if (fmt[i] == 'E')
1388
            {
1389
              int j;
1390
              for (j = XVECLEN (in, i) - 1; j >= 0; --j)
1391
                if (reg_overlap_mentioned_p (x, XVECEXP (in, i, j)))
1392
                  return 1;
1393
            }
1394
 
1395
        return 0;
1396
      }
1397
 
1398
    case SCRATCH:
1399
    case PC:
1400
    case CC0:
1401
      return reg_mentioned_p (x, in);
1402
 
1403
    case PARALLEL:
1404
      {
1405
        int i;
1406
 
1407
        /* If any register in here refers to it we return true.  */
1408
        for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1409
          if (XEXP (XVECEXP (x, 0, i), 0) != 0
1410
              && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))
1411
            return 1;
1412
        return 0;
1413
      }
1414
 
1415
    default:
1416
      gcc_assert (CONSTANT_P (x));
1417
      return 0;
1418
    }
1419
}
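
/* Illustrative sketch (not part of the original file; the helper name is
   hypothetical).  reg_mentioned_p compares registers by number only, so on
   a target where DImode needs two word registers, (reg:SI 1) is not
   "mentioned" by an expression that only contains (reg:DI 0), although the
   two overlap; reg_overlap_mentioned_p checks the hard register ranges and
   does report that overlap, which is what most correctness checks want.  */

static bool
example_pattern_overlaps_reg_p (rtx reg, rtx insn)
{
  /* Conservative: any overlap anywhere in the pattern counts, including
     uses hidden inside addresses and multi-word hard registers.  */
  return INSN_P (insn)
         && reg_overlap_mentioned_p (reg, PATTERN (insn)) != 0;
}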

/* Call FUN on each register or MEM that is stored into or clobbered by X.
   (X would be the pattern of an insn).  DATA is an arbitrary pointer,
   ignored by note_stores, but passed to FUN.

   FUN receives three arguments:
   1. the REG, MEM, CC0 or PC being stored in or clobbered,
   2. the SET or CLOBBER rtx that does the store,
   3. the pointer DATA provided to note_stores.

  If the item being stored in or clobbered is a SUBREG of a hard register,
  the SUBREG will be passed.  */

void
note_stores (const_rtx x, void (*fun) (rtx, const_rtx, void *), void *data)
{
  int i;

  if (GET_CODE (x) == COND_EXEC)
    x = COND_EXEC_CODE (x);

  if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
    {
      rtx dest = SET_DEST (x);

      while ((GET_CODE (dest) == SUBREG
              && (!REG_P (SUBREG_REG (dest))
                  || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
             || GET_CODE (dest) == ZERO_EXTRACT
             || GET_CODE (dest) == STRICT_LOW_PART)
        dest = XEXP (dest, 0);

      /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
         each of whose first operand is a register.  */
      if (GET_CODE (dest) == PARALLEL)
        {
          for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
            if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
              (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data);
        }
      else
        (*fun) (dest, x, data);
    }

  else if (GET_CODE (x) == PARALLEL)
    for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
      note_stores (XVECEXP (x, 0, i), fun, data);
}
 
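/* Illustrative usage sketch (not from the original file; both names below
   are hypothetical).  A typical note_stores callback: DATA is assumed to
   point to a HARD_REG_SET in which the caller collects the hard registers
   stored to or clobbered by an insn.  */

static void
example_record_hard_reg_stores (rtx dest, const_rtx set ATTRIBUTE_UNUSED,
                                void *data)
{
  HARD_REG_SET *stored = (HARD_REG_SET *) data;

  /* note_stores hands us the SUBREG itself for hard register SUBREGs;
     conservatively mark the whole inner register.  */
  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);
  if (REG_P (dest) && REGNO (dest) < FIRST_PSEUDO_REGISTER)
    add_to_hard_reg_set (stored, GET_MODE (dest), REGNO (dest));
}

/* The caller would do something like:

     HARD_REG_SET stored;
     CLEAR_HARD_REG_SET (stored);
     note_stores (PATTERN (insn), example_record_hard_reg_stores, &stored);  */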
1469
/* Like note_stores, but call FUN for each expression that is being
1470
   referenced in PBODY, a pointer to the PATTERN of an insn.  We only call
1471
   FUN for each expression, not any interior subexpressions.  FUN receives a
1472
   pointer to the expression and the DATA passed to this function.
1473
 
1474
   Note that this is not quite the same test as that done in reg_referenced_p
1475
   since that considers something as being referenced if it is being
1476
   partially set, while we do not.  */
1477
 
1478
void
1479
note_uses (rtx *pbody, void (*fun) (rtx *, void *), void *data)
1480
{
1481
  rtx body = *pbody;
1482
  int i;
1483
 
1484
  switch (GET_CODE (body))
1485
    {
1486
    case COND_EXEC:
1487
      (*fun) (&COND_EXEC_TEST (body), data);
1488
      note_uses (&COND_EXEC_CODE (body), fun, data);
1489
      return;
1490
 
1491
    case PARALLEL:
1492
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1493
        note_uses (&XVECEXP (body, 0, i), fun, data);
1494
      return;
1495
 
1496
    case SEQUENCE:
1497
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1498
        note_uses (&PATTERN (XVECEXP (body, 0, i)), fun, data);
1499
      return;
1500
 
1501
    case USE:
1502
      (*fun) (&XEXP (body, 0), data);
1503
      return;
1504
 
1505
    case ASM_OPERANDS:
1506
      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
1507
        (*fun) (&ASM_OPERANDS_INPUT (body, i), data);
1508
      return;
1509
 
1510
    case TRAP_IF:
1511
      (*fun) (&TRAP_CONDITION (body), data);
1512
      return;
1513
 
1514
    case PREFETCH:
1515
      (*fun) (&XEXP (body, 0), data);
1516
      return;
1517
 
1518
    case UNSPEC:
1519
    case UNSPEC_VOLATILE:
1520
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1521
        (*fun) (&XVECEXP (body, 0, i), data);
1522
      return;
1523
 
1524
    case CLOBBER:
1525
      if (MEM_P (XEXP (body, 0)))
1526
        (*fun) (&XEXP (XEXP (body, 0), 0), data);
1527
      return;
1528
 
1529
    case SET:
1530
      {
1531
        rtx dest = SET_DEST (body);
1532
 
1533
        /* For sets we replace everything in source plus registers in memory
1534
           expression in store and operands of a ZERO_EXTRACT.  */
1535
        (*fun) (&SET_SRC (body), data);
1536
 
1537
        if (GET_CODE (dest) == ZERO_EXTRACT)
1538
          {
1539
            (*fun) (&XEXP (dest, 1), data);
1540
            (*fun) (&XEXP (dest, 2), data);
1541
          }
1542
 
1543
        while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
1544
          dest = XEXP (dest, 0);
1545
 
1546
        if (MEM_P (dest))
1547
          (*fun) (&XEXP (dest, 0), data);
1548
      }
1549
      return;
1550
 
1551
    default:
1552
      /* All the other possibilities never store.  */
1553
      (*fun) (pbody, data);
1554
      return;
1555
    }
1556
}
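
/* Illustrative usage sketch (not part of the original file; the names are
   hypothetical).  A note_uses callback receives a pointer to each outermost
   used expression, so it can look inside (or even rewrite) the use.  Here
   reg_mentioned_p, defined earlier in this file, does the searching.  */

struct example_reg_use_search
{
  rtx reg;
  bool found;
};

static void
example_note_reg_use (rtx *loc, void *data)
{
  struct example_reg_use_search *s = (struct example_reg_use_search *) data;

  if (reg_mentioned_p (s->reg, *loc))
    s->found = true;
}

/* The caller would do something like:

     struct example_reg_use_search s;
     s.reg = reg;
     s.found = false;
     note_uses (&PATTERN (insn), example_note_reg_use, &s);  */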
1557
 
1558
/* Return nonzero if X's old contents don't survive after INSN.
1559
   This will be true if X is (cc0) or if X is a register and
1560
   X dies in INSN or because INSN entirely sets X.
1561
 
1562
   "Entirely set" means set directly and not through a SUBREG, or
1563
   ZERO_EXTRACT, so no trace of the old contents remains.
1564
   Likewise, REG_INC does not count.
1565
 
1566
   REG may be a hard or pseudo reg.  Renumbering is not taken into account,
1567
   but for this use that makes no difference, since regs don't overlap
1568
   during their lifetimes.  Therefore, this function may be used
1569
   at any time after deaths have been computed.
1570
 
1571
   If REG is a hard reg that occupies multiple machine registers, this
1572
   function will only return 1 if each of those registers will be replaced
1573
   by INSN.  */
1574
 
1575
int
1576
dead_or_set_p (const_rtx insn, const_rtx x)
1577
{
1578
  unsigned int regno, end_regno;
1579
  unsigned int i;
1580
 
1581
  /* Can't use cc0_rtx below since this file is used by genattrtab.c.  */
1582
  if (GET_CODE (x) == CC0)
1583
    return 1;
1584
 
1585
  gcc_assert (REG_P (x));
1586
 
1587
  regno = REGNO (x);
1588
  end_regno = END_REGNO (x);
1589
  for (i = regno; i < end_regno; i++)
1590
    if (! dead_or_set_regno_p (insn, i))
1591
      return 0;
1592
 
1593
  return 1;
1594
}
1595
 
1596
/* Return TRUE iff DEST is a register or subreg of a register and
1597
   doesn't change the number of words of the inner register, and any
1598
   part of the register is TEST_REGNO.  */
1599
 
1600
static bool
1601
covers_regno_no_parallel_p (const_rtx dest, unsigned int test_regno)
1602
{
1603
  unsigned int regno, endregno;
1604
 
1605
  if (GET_CODE (dest) == SUBREG
1606
      && (((GET_MODE_SIZE (GET_MODE (dest))
1607
            + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1608
          == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
1609
               + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
1610
    dest = SUBREG_REG (dest);
1611
 
1612
  if (!REG_P (dest))
1613
    return false;
1614
 
1615
  regno = REGNO (dest);
1616
  endregno = END_REGNO (dest);
1617
  return (test_regno >= regno && test_regno < endregno);
1618
}
1619
 
1620
/* Like covers_regno_no_parallel_p, but also handles PARALLELs where
1621
   any member matches the covers_regno_no_parallel_p criteria.  */
1622
 
1623
static bool
1624
covers_regno_p (const_rtx dest, unsigned int test_regno)
1625
{
1626
  if (GET_CODE (dest) == PARALLEL)
1627
    {
1628
      /* Some targets place small structures in registers for return
1629
         values of functions, and those registers are wrapped in
1630
         PARALLELs that we may see as the destination of a SET.  */
1631
      int i;
1632
 
1633
      for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
1634
        {
1635
          rtx inner = XEXP (XVECEXP (dest, 0, i), 0);
1636
          if (inner != NULL_RTX
1637
              && covers_regno_no_parallel_p (inner, test_regno))
1638
            return true;
1639
        }
1640
 
1641
      return false;
1642
    }
1643
  else
1644
    return covers_regno_no_parallel_p (dest, test_regno);
1645
}
1646
 
1647
/* Utility function for dead_or_set_p to check an individual register. */
1648
 
1649
int
1650
dead_or_set_regno_p (const_rtx insn, unsigned int test_regno)
1651
{
1652
  const_rtx pattern;
1653
 
1654
  /* See if there is a death note for something that includes TEST_REGNO.  */
1655
  if (find_regno_note (insn, REG_DEAD, test_regno))
1656
    return 1;
1657
 
1658
  if (CALL_P (insn)
1659
      && find_regno_fusage (insn, CLOBBER, test_regno))
1660
    return 1;
1661
 
1662
  pattern = PATTERN (insn);
1663
 
1664
  if (GET_CODE (pattern) == COND_EXEC)
1665
    pattern = COND_EXEC_CODE (pattern);
1666
 
1667
  if (GET_CODE (pattern) == SET)
1668
    return covers_regno_p (SET_DEST (pattern), test_regno);
1669
  else if (GET_CODE (pattern) == PARALLEL)
1670
    {
1671
      int i;
1672
 
1673
      for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
1674
        {
1675
          rtx body = XVECEXP (pattern, 0, i);
1676
 
1677
          if (GET_CODE (body) == COND_EXEC)
1678
            body = COND_EXEC_CODE (body);
1679
 
1680
          if ((GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
1681
              && covers_regno_p (SET_DEST (body), test_regno))
1682
            return 1;
1683
        }
1684
    }
1685
 
1686
  return 0;
1687
}
1688
 
1689
/* Return the reg-note of kind KIND in insn INSN, if there is one.
1690
   If DATUM is nonzero, look for one whose datum is DATUM.  */
1691
 
1692
rtx
1693
find_reg_note (const_rtx insn, enum reg_note kind, const_rtx datum)
1694
{
1695
  rtx link;
1696
 
1697
  gcc_assert (insn);
1698
 
1699
  /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
1700
  if (! INSN_P (insn))
1701
    return 0;
1702
  if (datum == 0)
1703
    {
1704
      for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1705
        if (REG_NOTE_KIND (link) == kind)
1706
          return link;
1707
      return 0;
1708
    }
1709
 
1710
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1711
    if (REG_NOTE_KIND (link) == kind && datum == XEXP (link, 0))
1712
      return link;
1713
  return 0;
1714
}
1715
 
1716
/* Return the reg-note of kind KIND in insn INSN which applies to register
1717
   number REGNO, if any.  Return 0 if there is no such reg-note.  Note that
1718
   the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
1719
   it might be the case that the note overlaps REGNO.  */
1720
 
1721
rtx
1722
find_regno_note (const_rtx insn, enum reg_note kind, unsigned int regno)
1723
{
1724
  rtx link;
1725
 
1726
  /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
1727
  if (! INSN_P (insn))
1728
    return 0;
1729
 
1730
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1731
    if (REG_NOTE_KIND (link) == kind
1732
        /* Verify that it is a register, so that scratch and MEM won't cause a
1733
           problem here.  */
1734
        && REG_P (XEXP (link, 0))
1735
        && REGNO (XEXP (link, 0)) <= regno
1736
        && END_REGNO (XEXP (link, 0)) > regno)
1737
      return link;
1738
  return 0;
1739
}
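
/* Illustrative usage sketch (not from the original file; the helper name is
   hypothetical): the usual "does REGNO die here" query built on
   find_regno_note.  */

static bool
example_regno_dies_here_p (rtx insn, unsigned int regno)
{
  return find_regno_note (insn, REG_DEAD, regno) != NULL_RTX;
}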
1740
 
1741
/* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
1742
   has such a note.  */
1743
 
1744
rtx
1745
find_reg_equal_equiv_note (const_rtx insn)
1746
{
1747
  rtx link;
1748
 
1749
  if (!INSN_P (insn))
1750
    return 0;
1751
 
1752
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1753
    if (REG_NOTE_KIND (link) == REG_EQUAL
1754
        || REG_NOTE_KIND (link) == REG_EQUIV)
1755
      {
1756
        /* FIXME: We should never have REG_EQUAL/REG_EQUIV notes on
1757
           insns that have multiple sets.  Checking single_set to
1758
           make sure of this is not the proper check, as explained
1759
           in the comment in set_unique_reg_note.
1760
 
1761
           This should be changed into an assert.  */
1762
        if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
1763
          return 0;
1764
        return link;
1765
      }
1766
  return NULL;
1767
}
1768
 
1769
/* Check whether INSN is a single_set whose source is known to be
1770
   equivalent to a constant.  Return that constant if so, otherwise
1771
   return null.  */
1772
 
1773
rtx
1774
find_constant_src (const_rtx insn)
1775
{
1776
  rtx note, set, x;
1777
 
1778
  set = single_set (insn);
1779
  if (set)
1780
    {
1781
      x = avoid_constant_pool_reference (SET_SRC (set));
1782
      if (CONSTANT_P (x))
1783
        return x;
1784
    }
1785
 
1786
  note = find_reg_equal_equiv_note (insn);
1787
  if (note && CONSTANT_P (XEXP (note, 0)))
1788
    return XEXP (note, 0);
1789
 
1790
  return NULL_RTX;
1791
}
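
/* Illustrative sketch only (hypothetical helper): a caller might use
   find_constant_src to decide whether an insn simply materializes a
   constant value.  */

static bool
loads_constant_p_example (const_rtx insn)
{
  return find_constant_src (insn) != NULL_RTX;
}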
1792
 
1793
/* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
1794
   in the CALL_INSN_FUNCTION_USAGE information of INSN.  */
1795
 
1796
int
1797
find_reg_fusage (const_rtx insn, enum rtx_code code, const_rtx datum)
1798
{
1799
  /* If it's not a CALL_INSN, it can't possibly have a
1800
     CALL_INSN_FUNCTION_USAGE field, so don't bother checking.  */
1801
  if (!CALL_P (insn))
1802
    return 0;
1803
 
1804
  gcc_assert (datum);
1805
 
1806
  if (!REG_P (datum))
1807
    {
1808
      rtx link;
1809
 
1810
      for (link = CALL_INSN_FUNCTION_USAGE (insn);
1811
           link;
1812
           link = XEXP (link, 1))
1813
        if (GET_CODE (XEXP (link, 0)) == code
1814
            && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))
1815
          return 1;
1816
    }
1817
  else
1818
    {
1819
      unsigned int regno = REGNO (datum);
1820
 
1821
      /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1822
         to pseudo registers, so don't bother checking.  */
1823
 
1824
      if (regno < FIRST_PSEUDO_REGISTER)
1825
        {
1826
          unsigned int end_regno = END_HARD_REGNO (datum);
1827
          unsigned int i;
1828
 
1829
          for (i = regno; i < end_regno; i++)
1830
            if (find_regno_fusage (insn, code, i))
1831
              return 1;
1832
        }
1833
    }
1834
 
1835
  return 0;
1836
}
1837
 
1838
/* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
1839
   in the CALL_INSN_FUNCTION_USAGE information of INSN.  */
1840
 
1841
int
1842
find_regno_fusage (const_rtx insn, enum rtx_code code, unsigned int regno)
1843
{
1844
  rtx link;
1845
 
1846
  /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1847
     to pseudo registers, so don't bother checking.  */
1848
 
1849
  if (regno >= FIRST_PSEUDO_REGISTER
1850
      || !CALL_P (insn) )
1851
    return 0;
1852
 
1853
  for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
1854
    {
1855
      rtx op, reg;
1856
 
1857
      if (GET_CODE (op = XEXP (link, 0)) == code
1858
          && REG_P (reg = XEXP (op, 0))
1859
          && REGNO (reg) <= regno
1860
          && END_HARD_REGNO (reg) > regno)
1861
        return 1;
1862
    }
1863
 
1864
  return 0;
1865
}
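
/* Hedged example (hypothetical helper): ask whether the function-usage
   information of CALL_INSN says that hard register REGNO is clobbered by
   the call, mirroring how dead_or_set_regno_p uses find_regno_fusage
   above.  */

static bool
call_clobbers_hard_reg_example (const_rtx call_insn, unsigned int regno)
{
  return CALL_P (call_insn)
         && find_regno_fusage (call_insn, CLOBBER, regno) != 0;
}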
1866
 
1867
 
1868
/* Allocate a register note with kind KIND and datum DATUM.  LIST is
1869
   stored as the pointer to the next register note.  */
1870
 
1871
rtx
1872
alloc_reg_note (enum reg_note kind, rtx datum, rtx list)
1873
{
1874
  rtx note;
1875
 
1876
  switch (kind)
1877
    {
1878
    case REG_CC_SETTER:
1879
    case REG_CC_USER:
1880
    case REG_LABEL_TARGET:
1881
    case REG_LABEL_OPERAND:
1882
      /* These types of register notes use an INSN_LIST rather than an
1883
         EXPR_LIST, so that copying is done right and dumps look
1884
         better.  */
1885
      note = alloc_INSN_LIST (datum, list);
1886
      PUT_REG_NOTE_KIND (note, kind);
1887
      break;
1888
 
1889
    default:
1890
      note = alloc_EXPR_LIST (kind, datum, list);
1891
      break;
1892
    }
1893
 
1894
  return note;
1895
}
1896
 
1897
/* Add register note with kind KIND and datum DATUM to INSN.  */
1898
 
1899
void
1900
add_reg_note (rtx insn, enum reg_note kind, rtx datum)
1901
{
1902
  REG_NOTES (insn) = alloc_reg_note (kind, datum, REG_NOTES (insn));
1903
}
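
/* Sketch (hypothetical helper): record that the value set by INSN is known
   to equal VAL by attaching a REG_EQUAL note.  alloc_reg_note, called via
   add_reg_note, picks EXPR_LIST or INSN_LIST form as appropriate.  */

static void
record_reg_equal_example (rtx insn, rtx val)
{
  add_reg_note (insn, REG_EQUAL, val);
}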
1904
 
1905
/* Remove register note NOTE from the REG_NOTES of INSN.  */
1906
 
1907
void
1908
remove_note (rtx insn, const_rtx note)
1909
{
1910
  rtx link;
1911
 
1912
  if (note == NULL_RTX)
1913
    return;
1914
 
1915
  if (REG_NOTES (insn) == note)
1916
    REG_NOTES (insn) = XEXP (note, 1);
1917
  else
1918
    for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1919
      if (XEXP (link, 1) == note)
1920
        {
1921
          XEXP (link, 1) = XEXP (note, 1);
1922
          break;
1923
        }
1924
 
1925
  switch (REG_NOTE_KIND (note))
1926
    {
1927
    case REG_EQUAL:
1928
    case REG_EQUIV:
1929
      df_notes_rescan (insn);
1930
      break;
1931
    default:
1932
      break;
1933
    }
1934
}
1935
 
1936
/* Remove REG_EQUAL and/or REG_EQUIV notes if INSN has such notes.  */
1937
 
1938
void
1939
remove_reg_equal_equiv_notes (rtx insn)
1940
{
1941
  rtx *loc;
1942
 
1943
  loc = &REG_NOTES (insn);
1944
  while (*loc)
1945
    {
1946
      enum reg_note kind = REG_NOTE_KIND (*loc);
1947
      if (kind == REG_EQUAL || kind == REG_EQUIV)
1948
        *loc = XEXP (*loc, 1);
1949
      else
1950
        loc = &XEXP (*loc, 1);
1951
    }
1952
}
1953
 
1954
/* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
1955
   return 1 if it is found.  A simple equality test is used to determine if
1956
   NODE matches.  */
1957
 
1958
int
1959
in_expr_list_p (const_rtx listp, const_rtx node)
1960
{
1961
  const_rtx x;
1962
 
1963
  for (x = listp; x; x = XEXP (x, 1))
1964
    if (node == XEXP (x, 0))
1965
      return 1;
1966
 
1967
  return 0;
1968
}
1969
 
1970
/* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
1971
   remove that entry from the list if it is found.
1972
 
1973
   A simple equality test is used to determine if NODE matches.  */
1974
 
1975
void
1976
remove_node_from_expr_list (const_rtx node, rtx *listp)
1977
{
1978
  rtx temp = *listp;
1979
  rtx prev = NULL_RTX;
1980
 
1981
  while (temp)
1982
    {
1983
      if (node == XEXP (temp, 0))
1984
        {
1985
          /* Splice the node out of the list.  */
1986
          if (prev)
1987
            XEXP (prev, 1) = XEXP (temp, 1);
1988
          else
1989
            *listp = XEXP (temp, 1);
1990
 
1991
          return;
1992
        }
1993
 
1994
      prev = temp;
1995
      temp = XEXP (temp, 1);
1996
    }
1997
}
1998
 
1999
/* Nonzero if X contains any volatile instructions.  These are instructions
2000
   which may cause unpredictable machine state, and thus no
2001
   instructions should be moved or combined across them.  This includes
2002
   only volatile asms and UNSPEC_VOLATILE instructions.  */
2003
 
2004
int
2005
volatile_insn_p (const_rtx x)
2006
{
2007
  const RTX_CODE code = GET_CODE (x);
2008
  switch (code)
2009
    {
2010
    case LABEL_REF:
2011
    case SYMBOL_REF:
2012
    case CONST_INT:
2013
    case CONST:
2014
    case CONST_DOUBLE:
2015
    case CONST_FIXED:
2016
    case CONST_VECTOR:
2017
    case CC0:
2018
    case PC:
2019
    case REG:
2020
    case SCRATCH:
2021
    case CLOBBER:
2022
    case ADDR_VEC:
2023
    case ADDR_DIFF_VEC:
2024
    case CALL:
2025
    case MEM:
2026
      return 0;
2027
 
2028
    case UNSPEC_VOLATILE:
2029
 /* case TRAP_IF: This isn't clear yet.  */
2030
      return 1;
2031
 
2032
    case ASM_INPUT:
2033
    case ASM_OPERANDS:
2034
      if (MEM_VOLATILE_P (x))
2035
        return 1;
2036
 
2037
    default:
2038
      break;
2039
    }
2040
 
2041
  /* Recursively scan the operands of this expression.  */
2042
 
2043
  {
2044
    const char *const fmt = GET_RTX_FORMAT (code);
2045
    int i;
2046
 
2047
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2048
      {
2049
        if (fmt[i] == 'e')
2050
          {
2051
            if (volatile_insn_p (XEXP (x, i)))
2052
              return 1;
2053
          }
2054
        else if (fmt[i] == 'E')
2055
          {
2056
            int j;
2057
            for (j = 0; j < XVECLEN (x, i); j++)
2058
              if (volatile_insn_p (XVECEXP (x, i, j)))
2059
                return 1;
2060
          }
2061
      }
2062
  }
2063
  return 0;
2064
}
2065
 
2066
/* Nonzero if X contains any volatile memory references,
2067
   UNSPEC_VOLATILE operations or volatile ASM_OPERANDS expressions.  */
2068
 
2069
int
2070
volatile_refs_p (const_rtx x)
2071
{
2072
  const RTX_CODE code = GET_CODE (x);
2073
  switch (code)
2074
    {
2075
    case LABEL_REF:
2076
    case SYMBOL_REF:
2077
    case CONST_INT:
2078
    case CONST:
2079
    case CONST_DOUBLE:
2080
    case CONST_FIXED:
2081
    case CONST_VECTOR:
2082
    case CC0:
2083
    case PC:
2084
    case REG:
2085
    case SCRATCH:
2086
    case CLOBBER:
2087
    case ADDR_VEC:
2088
    case ADDR_DIFF_VEC:
2089
      return 0;
2090
 
2091
    case UNSPEC_VOLATILE:
2092
      return 1;
2093
 
2094
    case MEM:
2095
    case ASM_INPUT:
2096
    case ASM_OPERANDS:
2097
      if (MEM_VOLATILE_P (x))
2098
        return 1;
2099
 
2100
    default:
2101
      break;
2102
    }
2103
 
2104
  /* Recursively scan the operands of this expression.  */
2105
 
2106
  {
2107
    const char *const fmt = GET_RTX_FORMAT (code);
2108
    int i;
2109
 
2110
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2111
      {
2112
        if (fmt[i] == 'e')
2113
          {
2114
            if (volatile_refs_p (XEXP (x, i)))
2115
              return 1;
2116
          }
2117
        else if (fmt[i] == 'E')
2118
          {
2119
            int j;
2120
            for (j = 0; j < XVECLEN (x, i); j++)
2121
              if (volatile_refs_p (XVECEXP (x, i, j)))
2122
                return 1;
2123
          }
2124
      }
2125
  }
2126
  return 0;
2127
}
2128
 
2129
/* Similar to above, except that it also rejects register pre- and post-
2130
   incrementing.  */
2131
 
2132
int
2133
side_effects_p (const_rtx x)
2134
{
2135
  const RTX_CODE code = GET_CODE (x);
2136
  switch (code)
2137
    {
2138
    case LABEL_REF:
2139
    case SYMBOL_REF:
2140
    case CONST_INT:
2141
    case CONST:
2142
    case CONST_DOUBLE:
2143
    case CONST_FIXED:
2144
    case CONST_VECTOR:
2145
    case CC0:
2146
    case PC:
2147
    case REG:
2148
    case SCRATCH:
2149
    case ADDR_VEC:
2150
    case ADDR_DIFF_VEC:
2151
    case VAR_LOCATION:
2152
      return 0;
2153
 
2154
    case CLOBBER:
2155
      /* Reject CLOBBER with a non-VOID mode.  These are made by combine.c
2156
         when some combination can't be done.  If we see one, don't think
2157
         that we can simplify the expression.  */
2158
      return (GET_MODE (x) != VOIDmode);
2159
 
2160
    case PRE_INC:
2161
    case PRE_DEC:
2162
    case POST_INC:
2163
    case POST_DEC:
2164
    case PRE_MODIFY:
2165
    case POST_MODIFY:
2166
    case CALL:
2167
    case UNSPEC_VOLATILE:
2168
 /* case TRAP_IF: This isn't clear yet.  */
2169
      return 1;
2170
 
2171
    case MEM:
2172
    case ASM_INPUT:
2173
    case ASM_OPERANDS:
2174
      if (MEM_VOLATILE_P (x))
2175
        return 1;
2176
 
2177
    default:
2178
      break;
2179
    }
2180
 
2181
  /* Recursively scan the operands of this expression.  */
2182
 
2183
  {
2184
    const char *fmt = GET_RTX_FORMAT (code);
2185
    int i;
2186
 
2187
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2188
      {
2189
        if (fmt[i] == 'e')
2190
          {
2191
            if (side_effects_p (XEXP (x, i)))
2192
              return 1;
2193
          }
2194
        else if (fmt[i] == 'E')
2195
          {
2196
            int j;
2197
            for (j = 0; j < XVECLEN (x, i); j++)
2198
              if (side_effects_p (XVECEXP (x, i, j)))
2199
                return 1;
2200
          }
2201
      }
2202
  }
2203
  return 0;
2204
}
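
/* A hedged sketch of how these predicates are typically combined by a
   code-motion pass: an expression is a candidate for moving only when it
   has no side effects and cannot trap.  may_trap_p is defined later in
   this file; the helper name is hypothetical.  */

static bool
movable_expr_p_example (const_rtx x)
{
  return !side_effects_p (x) && !may_trap_p (x);
}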
2205
 
2206
/* Return nonzero if evaluating rtx X might cause a trap.
2207
   FLAGS controls how to consider MEMs.  A nonzero value means the context
2208
   of the access may have changed from the original, such that the
2209
   address may have become invalid.  */
2210
 
2211
int
2212
may_trap_p_1 (const_rtx x, unsigned flags)
2213
{
2214
  int i;
2215
  enum rtx_code code;
2216
  const char *fmt;
2217
 
2218
  /* We make no distinction currently, but this function is part of
2219
     the internal target-hooks ABI so we keep the parameter as
2220
     "unsigned flags".  */
2221
  bool code_changed = flags != 0;
2222
 
2223
  if (x == 0)
2224
    return 0;
2225
  code = GET_CODE (x);
2226
  switch (code)
2227
    {
2228
      /* Handle these cases quickly.  */
2229
    case CONST_INT:
2230
    case CONST_DOUBLE:
2231
    case CONST_FIXED:
2232
    case CONST_VECTOR:
2233
    case SYMBOL_REF:
2234
    case LABEL_REF:
2235
    case CONST:
2236
    case PC:
2237
    case CC0:
2238
    case REG:
2239
    case SCRATCH:
2240
      return 0;
2241
 
2242
    case UNSPEC:
2243
    case UNSPEC_VOLATILE:
2244
      return targetm.unspec_may_trap_p (x, flags);
2245
 
2246
    case ASM_INPUT:
2247
    case TRAP_IF:
2248
      return 1;
2249
 
2250
    case ASM_OPERANDS:
2251
      return MEM_VOLATILE_P (x);
2252
 
2253
      /* Memory ref can trap unless it's a static var or a stack slot.  */
2254
    case MEM:
2255
      /* Recognize specific pattern of stack checking probes.  */
2256
      if (flag_stack_check
2257
          && MEM_VOLATILE_P (x)
2258
          && XEXP (x, 0) == stack_pointer_rtx)
2259
        return 1;
2260
      if (/* MEM_NOTRAP_P only relates to the actual position of the memory
2261
             reference; moving it out of context such as when moving code
2262
             when optimizing, might cause its address to become invalid.  */
2263
          code_changed
2264
          || !MEM_NOTRAP_P (x))
2265
        {
2266
          HOST_WIDE_INT size = MEM_SIZE (x) ? INTVAL (MEM_SIZE (x)) : 0;
2267
          return rtx_addr_can_trap_p_1 (XEXP (x, 0), 0, size,
2268
                                        GET_MODE (x), code_changed);
2269
        }
2270
 
2271
      return 0;
2272
 
2273
      /* Division by a non-constant might trap.  */
2274
    case DIV:
2275
    case MOD:
2276
    case UDIV:
2277
    case UMOD:
2278
      if (HONOR_SNANS (GET_MODE (x)))
2279
        return 1;
2280
      if (SCALAR_FLOAT_MODE_P (GET_MODE (x)))
2281
        return flag_trapping_math;
2282
      if (!CONSTANT_P (XEXP (x, 1)) || (XEXP (x, 1) == const0_rtx))
2283
        return 1;
2284
      break;
2285
 
2286
    case EXPR_LIST:
2287
      /* An EXPR_LIST is used to represent a function call.  This
2288
         certainly may trap.  */
2289
      return 1;
2290
 
2291
    case GE:
2292
    case GT:
2293
    case LE:
2294
    case LT:
2295
    case LTGT:
2296
    case COMPARE:
2297
      /* Some floating point comparisons may trap.  */
2298
      if (!flag_trapping_math)
2299
        break;
2300
      /* ??? There is no machine independent way to check for tests that trap
2301
         when COMPARE is used, though many targets do make this distinction.
2302
         For instance, sparc uses CCFPE for compares which generate exceptions
2303
         and CCFP for compares which do not generate exceptions.  */
2304
      if (HONOR_NANS (GET_MODE (x)))
2305
        return 1;
2306
      /* But often the compare has some CC mode, so check operand
2307
         modes as well.  */
2308
      if (HONOR_NANS (GET_MODE (XEXP (x, 0)))
2309
          || HONOR_NANS (GET_MODE (XEXP (x, 1))))
2310
        return 1;
2311
      break;
2312
 
2313
    case EQ:
2314
    case NE:
2315
      if (HONOR_SNANS (GET_MODE (x)))
2316
        return 1;
2317
      /* Often comparison is CC mode, so check operand modes.  */
2318
      if (HONOR_SNANS (GET_MODE (XEXP (x, 0)))
2319
          || HONOR_SNANS (GET_MODE (XEXP (x, 1))))
2320
        return 1;
2321
      break;
2322
 
2323
    case FIX:
2324
      /* Conversion of floating point might trap.  */
2325
      if (flag_trapping_math && HONOR_NANS (GET_MODE (XEXP (x, 0))))
2326
        return 1;
2327
      break;
2328
 
2329
    case NEG:
2330
    case ABS:
2331
    case SUBREG:
2332
      /* These operations don't trap even with floating point.  */
2333
      break;
2334
 
2335
    default:
2336
      /* Any floating arithmetic may trap.  */
2337
      if (SCALAR_FLOAT_MODE_P (GET_MODE (x))
2338
          && flag_trapping_math)
2339
        return 1;
2340
    }
2341
 
2342
  fmt = GET_RTX_FORMAT (code);
2343
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2344
    {
2345
      if (fmt[i] == 'e')
2346
        {
2347
          if (may_trap_p_1 (XEXP (x, i), flags))
2348
            return 1;
2349
        }
2350
      else if (fmt[i] == 'E')
2351
        {
2352
          int j;
2353
          for (j = 0; j < XVECLEN (x, i); j++)
2354
            if (may_trap_p_1 (XVECEXP (x, i, j), flags))
2355
              return 1;
2356
        }
2357
    }
2358
  return 0;
2359
}
2360
 
2361
/* Return nonzero if evaluating rtx X might cause a trap.  */
2362
 
2363
int
2364
may_trap_p (const_rtx x)
2365
{
2366
  return may_trap_p_1 (x, 0);
2367
}
2368
 
2369
/* Same as above, but additionally return nonzero if evaluating rtx X might
2370
   cause a fault.  We define a fault for the purpose of this function as an
2371
   erroneous execution condition that cannot be encountered during the normal
2372
   execution of a valid program; the typical example is an unaligned memory
2373
   access on a strict alignment machine.  The compiler guarantees that it
2374
   doesn't generate code that will fault from a valid program, but this
2375
   guarantee doesn't mean anything for individual instructions.  Consider
2376
   the following example:
2377
 
2378
      struct S { int d; union { char *cp; int *ip; }; };
2379
 
2380
      int foo(struct S *s)
2381
      {
2382
        if (s->d == 1)
2383
          return *s->ip;
2384
        else
2385
          return *s->cp;
2386
      }
2387
 
2388
   on a strict alignment machine.  In a valid program, foo will never be
2389
   invoked on a structure for which d is equal to 1 and the underlying
2390
   unique field of the union not aligned on a 4-byte boundary, but the
2391
   expression *s->ip might cause a fault if considered individually.
2392
 
2393
   At the RTL level, potentially problematic expressions will almost always
2394
   verify may_trap_p; for example, the above dereference can be emitted as
2395
   (mem:SI (reg:P)) and this expression is may_trap_p for a generic register.
2396
   However, suppose that foo is inlined in a caller that causes s->cp to
2397
   point to a local character variable and guarantees that s->d is not set
2398
   to 1; foo may have been effectively translated into pseudo-RTL as:
2399
 
2400
      if ((reg:SI) == 1)
2401
        (set (reg:SI) (mem:SI (%fp - 7)))
2402
      else
2403
        (set (reg:QI) (mem:QI (%fp - 7)))
2404
 
2405
   Now (mem:SI (%fp - 7)) is considered as not may_trap_p since it is a
2406
   memory reference to a stack slot, but it will certainly cause a fault
2407
   on a strict alignment machine.  */
2408
 
2409
int
2410
may_trap_or_fault_p (const_rtx x)
2411
{
2412
  return may_trap_p_1 (x, 1);
2413
}
2414
 
2415
/* Return nonzero if X contains a comparison that is not either EQ or NE,
2416
   i.e., an inequality.  */
2417
 
2418
int
2419
inequality_comparisons_p (const_rtx x)
2420
{
2421
  const char *fmt;
2422
  int len, i;
2423
  const enum rtx_code code = GET_CODE (x);
2424
 
2425
  switch (code)
2426
    {
2427
    case REG:
2428
    case SCRATCH:
2429
    case PC:
2430
    case CC0:
2431
    case CONST_INT:
2432
    case CONST_DOUBLE:
2433
    case CONST_FIXED:
2434
    case CONST_VECTOR:
2435
    case CONST:
2436
    case LABEL_REF:
2437
    case SYMBOL_REF:
2438
      return 0;
2439
 
2440
    case LT:
2441
    case LTU:
2442
    case GT:
2443
    case GTU:
2444
    case LE:
2445
    case LEU:
2446
    case GE:
2447
    case GEU:
2448
      return 1;
2449
 
2450
    default:
2451
      break;
2452
    }
2453
 
2454
  len = GET_RTX_LENGTH (code);
2455
  fmt = GET_RTX_FORMAT (code);
2456
 
2457
  for (i = 0; i < len; i++)
2458
    {
2459
      if (fmt[i] == 'e')
2460
        {
2461
          if (inequality_comparisons_p (XEXP (x, i)))
2462
            return 1;
2463
        }
2464
      else if (fmt[i] == 'E')
2465
        {
2466
          int j;
2467
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2468
            if (inequality_comparisons_p (XVECEXP (x, i, j)))
2469
              return 1;
2470
        }
2471
    }
2472
 
2473
  return 0;
2474
}
2475
 
2476
/* Replace any occurrence of FROM in X with TO.  The function does
2477
   not descend into CONST_DOUBLE expressions during the replacement.
2478
 
2479
   Note that copying is not done, so X must not be shared unless all copies
2480
   are to be modified.  */
2481
 
2482
rtx
2483
replace_rtx (rtx x, rtx from, rtx to)
2484
{
2485
  int i, j;
2486
  const char *fmt;
2487
 
2488
  /* The following prevents infinite loops when we change a MEM inside a
2489
     CONST_DOUBLE into the same CONST_DOUBLE.  */
2490
  if (x != 0 && GET_CODE (x) == CONST_DOUBLE)
2491
    return x;
2492
 
2493
  if (x == from)
2494
    return to;
2495
 
2496
  /* Allow this function to make replacements in EXPR_LISTs.  */
2497
  if (x == 0)
2498
    return 0;
2499
 
2500
  if (GET_CODE (x) == SUBREG)
2501
    {
2502
      rtx new_rtx = replace_rtx (SUBREG_REG (x), from, to);
2503
 
2504
      if (CONST_INT_P (new_rtx))
2505
        {
2506
          x = simplify_subreg (GET_MODE (x), new_rtx,
2507
                               GET_MODE (SUBREG_REG (x)),
2508
                               SUBREG_BYTE (x));
2509
          gcc_assert (x);
2510
        }
2511
      else
2512
        SUBREG_REG (x) = new_rtx;
2513
 
2514
      return x;
2515
    }
2516
  else if (GET_CODE (x) == ZERO_EXTEND)
2517
    {
2518
      rtx new_rtx = replace_rtx (XEXP (x, 0), from, to);
2519
 
2520
      if (CONST_INT_P (new_rtx))
2521
        {
2522
          x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
2523
                                        new_rtx, GET_MODE (XEXP (x, 0)));
2524
          gcc_assert (x);
2525
        }
2526
      else
2527
        XEXP (x, 0) = new_rtx;
2528
 
2529
      return x;
2530
    }
2531
 
2532
  fmt = GET_RTX_FORMAT (GET_CODE (x));
2533
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2534
    {
2535
      if (fmt[i] == 'e')
2536
        XEXP (x, i) = replace_rtx (XEXP (x, i), from, to);
2537
      else if (fmt[i] == 'E')
2538
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2539
          XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
2540
    }
2541
 
2542
  return x;
2543
}
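
/* Minimal usage sketch (hypothetical caller): rewrite every occurrence of
   OLD_REG in the pattern of INSN with NEW_REG.  Note the caveat above:
   the pattern must not be shared, or all copies will be modified.  */

static void
substitute_reg_example (rtx insn, rtx old_reg, rtx new_reg)
{
  PATTERN (insn) = replace_rtx (PATTERN (insn), old_reg, new_reg);
}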
2544
 
2545
/* Replace occurrences of the old label in *X with the new one.
2546
   DATA is a REPLACE_LABEL_DATA containing the old and new labels.  */
2547
 
2548
int
2549
replace_label (rtx *x, void *data)
2550
{
2551
  rtx l = *x;
2552
  rtx old_label = ((replace_label_data *) data)->r1;
2553
  rtx new_label = ((replace_label_data *) data)->r2;
2554
  bool update_label_nuses = ((replace_label_data *) data)->update_label_nuses;
2555
 
2556
  if (l == NULL_RTX)
2557
    return 0;
2558
 
2559
  if (GET_CODE (l) == SYMBOL_REF
2560
      && CONSTANT_POOL_ADDRESS_P (l))
2561
    {
2562
      rtx c = get_pool_constant (l);
2563
      if (rtx_referenced_p (old_label, c))
2564
        {
2565
          rtx new_c, new_l;
2566
          replace_label_data *d = (replace_label_data *) data;
2567
 
2568
          /* Create a copy of constant C; replace the label inside
2569
             but do not update LABEL_NUSES because uses in constant pool
2570
             are not counted.  */
2571
          new_c = copy_rtx (c);
2572
          d->update_label_nuses = false;
2573
          for_each_rtx (&new_c, replace_label, data);
2574
          d->update_label_nuses = update_label_nuses;
2575
 
2576
          /* Add the new constant NEW_C to constant pool and replace
2577
             the old reference to constant by new reference.  */
2578
          new_l = XEXP (force_const_mem (get_pool_mode (l), new_c), 0);
2579
          *x = replace_rtx (l, l, new_l);
2580
        }
2581
      return 0;
2582
    }
2583
 
2584
  /* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL
2585
     field.  This is not handled by for_each_rtx because it doesn't
2586
     handle unprinted ('0') fields.  */
2587
  if (JUMP_P (l) && JUMP_LABEL (l) == old_label)
2588
    JUMP_LABEL (l) = new_label;
2589
 
2590
  if ((GET_CODE (l) == LABEL_REF
2591
       || GET_CODE (l) == INSN_LIST)
2592
      && XEXP (l, 0) == old_label)
2593
    {
2594
      XEXP (l, 0) = new_label;
2595
      if (update_label_nuses)
2596
        {
2597
          ++LABEL_NUSES (new_label);
2598
          --LABEL_NUSES (old_label);
2599
        }
2600
      return 0;
2601
    }
2602
 
2603
  return 0;
2604
}
2605
 
2606
/* Return nonzero when *BODY is equal to X or X is directly referenced by
2607
   *BODY, so that FOR_EACH_RTX stops traversing and returns nonzero too;
2608
   otherwise FOR_EACH_RTX continues traversing *BODY.  */
2609
 
2610
static int
2611
rtx_referenced_p_1 (rtx *body, void *x)
2612
{
2613
  rtx y = (rtx) x;
2614
 
2615
  if (*body == NULL_RTX)
2616
    return y == NULL_RTX;
2617
 
2618
  /* Return true if a label_ref *BODY refers to label Y.  */
2619
  if (GET_CODE (*body) == LABEL_REF && LABEL_P (y))
2620
    return XEXP (*body, 0) == y;
2621
 
2622
  /* If *BODY is a reference to pool constant traverse the constant.  */
2623
  if (GET_CODE (*body) == SYMBOL_REF
2624
      && CONSTANT_POOL_ADDRESS_P (*body))
2625
    return rtx_referenced_p (y, get_pool_constant (*body));
2626
 
2627
  /* By default, compare the RTL expressions.  */
2628
  return rtx_equal_p (*body, y);
2629
}
2630
 
2631
/* Return true if X is referenced in BODY.  */
2632
 
2633
int
2634
rtx_referenced_p (rtx x, rtx body)
2635
{
2636
  return for_each_rtx (&body, rtx_referenced_p_1, x);
2637
}
2638
 
2639
/* If INSN is a tablejump, return true and store the label (before the jump table) in
2640
   *LABELP and the jump table in *TABLEP.  LABELP and TABLEP may be NULL.  */
2641
 
2642
bool
2643
tablejump_p (const_rtx insn, rtx *labelp, rtx *tablep)
2644
{
2645
  rtx label, table;
2646
 
2647
  if (JUMP_P (insn)
2648
      && (label = JUMP_LABEL (insn)) != NULL_RTX
2649
      && (table = next_active_insn (label)) != NULL_RTX
2650
      && JUMP_TABLE_DATA_P (table))
2651
    {
2652
      if (labelp)
2653
        *labelp = label;
2654
      if (tablep)
2655
        *tablep = table;
2656
      return true;
2657
    }
2658
  return false;
2659
}
2660
 
2661
/* A subroutine of computed_jump_p, return 1 if X contains a REG or MEM or
2662
   constant that is not in the constant pool and not in the condition
2663
   of an IF_THEN_ELSE.  */
2664
 
2665
static int
2666
computed_jump_p_1 (const_rtx x)
2667
{
2668
  const enum rtx_code code = GET_CODE (x);
2669
  int i, j;
2670
  const char *fmt;
2671
 
2672
  switch (code)
2673
    {
2674
    case LABEL_REF:
2675
    case PC:
2676
      return 0;
2677
 
2678
    case CONST:
2679
    case CONST_INT:
2680
    case CONST_DOUBLE:
2681
    case CONST_FIXED:
2682
    case CONST_VECTOR:
2683
    case SYMBOL_REF:
2684
    case REG:
2685
      return 1;
2686
 
2687
    case MEM:
2688
      return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
2689
                && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));
2690
 
2691
    case IF_THEN_ELSE:
2692
      return (computed_jump_p_1 (XEXP (x, 1))
2693
              || computed_jump_p_1 (XEXP (x, 2)));
2694
 
2695
    default:
2696
      break;
2697
    }
2698
 
2699
  fmt = GET_RTX_FORMAT (code);
2700
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2701
    {
2702
      if (fmt[i] == 'e'
2703
          && computed_jump_p_1 (XEXP (x, i)))
2704
        return 1;
2705
 
2706
      else if (fmt[i] == 'E')
2707
        for (j = 0; j < XVECLEN (x, i); j++)
2708
          if (computed_jump_p_1 (XVECEXP (x, i, j)))
2709
            return 1;
2710
    }
2711
 
2712
  return 0;
2713
}
2714
 
2715
/* Return nonzero if INSN is an indirect jump (aka computed jump).
2716
 
2717
   Tablejumps and casesi insns are not considered indirect jumps;
2718
   we can recognize them by a (use (label_ref)).  */
2719
 
2720
int
2721
computed_jump_p (const_rtx insn)
2722
{
2723
  int i;
2724
  if (JUMP_P (insn))
2725
    {
2726
      rtx pat = PATTERN (insn);
2727
 
2728
      /* If we have a JUMP_LABEL set, we're not a computed jump.  */
2729
      if (JUMP_LABEL (insn) != NULL)
2730
        return 0;
2731
 
2732
      if (GET_CODE (pat) == PARALLEL)
2733
        {
2734
          int len = XVECLEN (pat, 0);
2735
          int has_use_labelref = 0;
2736
 
2737
          for (i = len - 1; i >= 0; i--)
2738
            if (GET_CODE (XVECEXP (pat, 0, i)) == USE
2739
                && (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
2740
                    == LABEL_REF))
2741
              has_use_labelref = 1;
2742
 
2743
          if (! has_use_labelref)
2744
            for (i = len - 1; i >= 0; i--)
2745
              if (GET_CODE (XVECEXP (pat, 0, i)) == SET
2746
                  && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
2747
                  && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))
2748
                return 1;
2749
        }
2750
      else if (GET_CODE (pat) == SET
2751
               && SET_DEST (pat) == pc_rtx
2752
               && computed_jump_p_1 (SET_SRC (pat)))
2753
        return 1;
2754
    }
2755
  return 0;
2756
}
2757
 
2758
/* Optimized loop of for_each_rtx, trying to avoid useless recursive
2759
   calls.  Processes the subexpressions of EXP and passes them to F.  */
2760
static int
2761
for_each_rtx_1 (rtx exp, int n, rtx_function f, void *data)
2762
{
2763
  int result, i, j;
2764
  const char *format = GET_RTX_FORMAT (GET_CODE (exp));
2765
  rtx *x;
2766
 
2767
  for (; format[n] != '\0'; n++)
2768
    {
2769
      switch (format[n])
2770
        {
2771
        case 'e':
2772
          /* Call F on X.  */
2773
          x = &XEXP (exp, n);
2774
          result = (*f) (x, data);
2775
          if (result == -1)
2776
            /* Do not traverse sub-expressions.  */
2777
            continue;
2778
          else if (result != 0)
2779
            /* Stop the traversal.  */
2780
            return result;
2781
 
2782
          if (*x == NULL_RTX)
2783
            /* There are no sub-expressions.  */
2784
            continue;
2785
 
2786
          i = non_rtx_starting_operands[GET_CODE (*x)];
2787
          if (i >= 0)
2788
            {
2789
              result = for_each_rtx_1 (*x, i, f, data);
2790
              if (result != 0)
2791
                return result;
2792
            }
2793
          break;
2794
 
2795
        case 'V':
2796
        case 'E':
2797
          if (XVEC (exp, n) == 0)
2798
            continue;
2799
          for (j = 0; j < XVECLEN (exp, n); ++j)
2800
            {
2801
              /* Call F on X.  */
2802
              x = &XVECEXP (exp, n, j);
2803
              result = (*f) (x, data);
2804
              if (result == -1)
2805
                /* Do not traverse sub-expressions.  */
2806
                continue;
2807
              else if (result != 0)
2808
                /* Stop the traversal.  */
2809
                return result;
2810
 
2811
              if (*x == NULL_RTX)
2812
                /* There are no sub-expressions.  */
2813
                continue;
2814
 
2815
              i = non_rtx_starting_operands[GET_CODE (*x)];
2816
              if (i >= 0)
2817
                {
2818
                  result = for_each_rtx_1 (*x, i, f, data);
2819
                  if (result != 0)
2820
                    return result;
2821
                }
2822
            }
2823
          break;
2824
 
2825
        default:
2826
          /* Nothing to do.  */
2827
          break;
2828
        }
2829
    }
2830
 
2831
  return 0;
2832
}
2833
 
2834
/* Traverse X via depth-first search, calling F for each
2835
   sub-expression (including X itself).  F is also passed the DATA.
2836
   If F returns -1, do not traverse sub-expressions, but continue
2837
   traversing the rest of the tree.  If F ever returns any other
2838
   nonzero value, stop the traversal, and return the value returned
2839
   by F.  Otherwise, return 0.  This function does not traverse inside
2840
   tree structure that contains RTX_EXPRs, or into sub-expressions
2841
   whose format code is `0' since it is not known whether or not those
2842
   codes are actually RTL.
2843
 
2844
   This routine is very general, and could (should?) be used to
2845
   implement many of the other routines in this file.  */
2846
 
2847
int
2848
for_each_rtx (rtx *x, rtx_function f, void *data)
2849
{
2850
  int result;
2851
  int i;
2852
 
2853
  /* Call F on X.  */
2854
  result = (*f) (x, data);
2855
  if (result == -1)
2856
    /* Do not traverse sub-expressions.  */
2857
    return 0;
2858
  else if (result != 0)
2859
    /* Stop the traversal.  */
2860
    return result;
2861
 
2862
  if (*x == NULL_RTX)
2863
    /* There are no sub-expressions.  */
2864
    return 0;
2865
 
2866
  i = non_rtx_starting_operands[GET_CODE (*x)];
2867
  if (i < 0)
2868
    return 0;
2869
 
2870
  return for_each_rtx_1 (*x, i, f, data);
2871
}
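
/* A small usage sketch of the for_each_rtx callback protocol: the callback
   returns 0 to keep walking, -1 to skip sub-expressions, or any other value
   to stop the traversal.  Here it simply counts MEM sub-expressions.  Both
   helper names are hypothetical.  */

static int
count_mems_1_example (rtx *x, void *data)
{
  if (*x && MEM_P (*x))
    ++*(int *) data;
  return 0;             /* Keep traversing.  */
}

static int
count_mems_example (rtx x)
{
  int count = 0;
  for_each_rtx (&x, count_mems_1_example, &count);
  return count;
}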
2872
 
2873
 
2874
/* Searches X for any reference to REGNO, returning the rtx of the
2875
   reference found if any.  Otherwise, returns NULL_RTX.  */
2876
 
2877
rtx
2878
regno_use_in (unsigned int regno, rtx x)
2879
{
2880
  const char *fmt;
2881
  int i, j;
2882
  rtx tem;
2883
 
2884
  if (REG_P (x) && REGNO (x) == regno)
2885
    return x;
2886
 
2887
  fmt = GET_RTX_FORMAT (GET_CODE (x));
2888
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2889
    {
2890
      if (fmt[i] == 'e')
2891
        {
2892
          if ((tem = regno_use_in (regno, XEXP (x, i))))
2893
            return tem;
2894
        }
2895
      else if (fmt[i] == 'E')
2896
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2897
          if ((tem = regno_use_in (regno , XVECEXP (x, i, j))))
2898
            return tem;
2899
    }
2900
 
2901
  return NULL_RTX;
2902
}
2903
 
2904
/* Return a value indicating whether OP, an operand of a commutative
2905
   operation, is preferred as the first or second operand.  The higher
2906
   the value, the stronger the preference for being the first operand.
2907
   We use negative values to indicate a preference for the first operand
2908
   and positive values for the second operand.  */
2909
 
2910
int
2911
commutative_operand_precedence (rtx op)
2912
{
2913
  enum rtx_code code = GET_CODE (op);
2914
 
2915
  /* Constants always come as the second operand.  Prefer "nice" constants.  */
2916
  if (code == CONST_INT)
2917
    return -8;
2918
  if (code == CONST_DOUBLE)
2919
    return -7;
2920
  if (code == CONST_FIXED)
2921
    return -7;
2922
  op = avoid_constant_pool_reference (op);
2923
  code = GET_CODE (op);
2924
 
2925
  switch (GET_RTX_CLASS (code))
2926
    {
2927
    case RTX_CONST_OBJ:
2928
      if (code == CONST_INT)
2929
        return -6;
2930
      if (code == CONST_DOUBLE)
2931
        return -5;
2932
      if (code == CONST_FIXED)
2933
        return -5;
2934
      return -4;
2935
 
2936
    case RTX_EXTRA:
2937
      /* SUBREGs of objects should come second.  */
2938
      if (code == SUBREG && OBJECT_P (SUBREG_REG (op)))
2939
        return -3;
2940
      return 0;
2941
 
2942
    case RTX_OBJ:
2943
      /* Complex expressions should come first, so decrease the priority
2944
         of objects.  Prefer pointer objects over non-pointer objects.  */
2945
      if ((REG_P (op) && REG_POINTER (op))
2946
          || (MEM_P (op) && MEM_POINTER (op)))
2947
        return -1;
2948
      return -2;
2949
 
2950
    case RTX_COMM_ARITH:
2951
      /* Prefer operands that are themselves commutative to be first.
2952
         This helps to make things linear.  In particular,
2953
         (and (and (reg) (reg)) (not (reg))) is canonical.  */
2954
      return 4;
2955
 
2956
    case RTX_BIN_ARITH:
2957
      /* If only one operand is a binary expression, it will be the first
2958
         operand.  In particular,  (plus (minus (reg) (reg)) (neg (reg)))
2959
         is canonical, although it will usually be further simplified.  */
2960
      return 2;
2961
 
2962
    case RTX_UNARY:
2963
      /* Then prefer NEG and NOT.  */
2964
      if (code == NEG || code == NOT)
2965
        return 1;
2966
 
2967
    default:
2968
      return 0;
2969
    }
2970
}
2971
 
2972
/* Return 1 iff it is necessary to swap operands of commutative operation
2973
   in order to canonicalize expression.  */
2974
 
2975
bool
2976
swap_commutative_operands_p (rtx x, rtx y)
2977
{
2978
  return (commutative_operand_precedence (x)
2979
          < commutative_operand_precedence (y));
2980
}
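
/* Sketch of the intended use (hypothetical helper): when building a
   commutative expression by hand, swap the operands first so the result is
   already in canonical order.  gen_rtx_PLUS is the standard RTL
   constructor.  */

static rtx
gen_canonical_plus_example (enum machine_mode mode, rtx a, rtx b)
{
  if (swap_commutative_operands_p (a, b))
    {
      rtx tem = a;
      a = b;
      b = tem;
    }
  return gen_rtx_PLUS (mode, a, b);
}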
2981
 
2982
/* Return 1 if X is an autoincrement side effect and the register is
2983
   not the stack pointer.  */
2984
int
2985
auto_inc_p (const_rtx x)
2986
{
2987
  switch (GET_CODE (x))
2988
    {
2989
    case PRE_INC:
2990
    case POST_INC:
2991
    case PRE_DEC:
2992
    case POST_DEC:
2993
    case PRE_MODIFY:
2994
    case POST_MODIFY:
2995
      /* There are no REG_INC notes for SP.  */
2996
      if (XEXP (x, 0) != stack_pointer_rtx)
2997
        return 1;
2998
    default:
2999
      break;
3000
    }
3001
  return 0;
3002
}
3003
 
3004
/* Return nonzero if IN contains a piece of rtl that has the address LOC.  */
3005
int
3006
loc_mentioned_in_p (rtx *loc, const_rtx in)
3007
{
3008
  enum rtx_code code;
3009
  const char *fmt;
3010
  int i, j;
3011
 
3012
  if (!in)
3013
    return 0;
3014
 
3015
  code = GET_CODE (in);
3016
  fmt = GET_RTX_FORMAT (code);
3017
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3018
    {
3019
      if (fmt[i] == 'e')
3020
        {
3021
          if (loc == &XEXP (in, i) || loc_mentioned_in_p (loc, XEXP (in, i)))
3022
            return 1;
3023
        }
3024
      else if (fmt[i] == 'E')
3025
        for (j = XVECLEN (in, i) - 1; j >= 0; j--)
3026
          if (loc == &XVECEXP (in, i, j)
3027
              || loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
3028
            return 1;
3029
    }
3030
  return 0;
3031
}
3032
 
3033
/* Helper function for subreg_lsb.  Given a subreg's OUTER_MODE, INNER_MODE,
3034
   and SUBREG_BYTE, return the bit offset where the subreg begins
3035
   (counting from the least significant bit of the operand).  */
3036
 
3037
unsigned int
3038
subreg_lsb_1 (enum machine_mode outer_mode,
3039
              enum machine_mode inner_mode,
3040
              unsigned int subreg_byte)
3041
{
3042
  unsigned int bitpos;
3043
  unsigned int byte;
3044
  unsigned int word;
3045
 
3046
  /* A paradoxical subreg begins at bit position 0.  */
3047
  if (GET_MODE_BITSIZE (outer_mode) > GET_MODE_BITSIZE (inner_mode))
3048
    return 0;
3049
 
3050
  if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
3051
    /* If the subreg crosses a word boundary ensure that
3052
       it also begins and ends on a word boundary.  */
3053
    gcc_assert (!((subreg_byte % UNITS_PER_WORD
3054
                  + GET_MODE_SIZE (outer_mode)) > UNITS_PER_WORD
3055
                  && (subreg_byte % UNITS_PER_WORD
3056
                      || GET_MODE_SIZE (outer_mode) % UNITS_PER_WORD)));
3057
 
3058
  if (WORDS_BIG_ENDIAN)
3059
    word = (GET_MODE_SIZE (inner_mode)
3060
            - (subreg_byte + GET_MODE_SIZE (outer_mode))) / UNITS_PER_WORD;
3061
  else
3062
    word = subreg_byte / UNITS_PER_WORD;
3063
  bitpos = word * BITS_PER_WORD;
3064
 
3065
  if (BYTES_BIG_ENDIAN)
3066
    byte = (GET_MODE_SIZE (inner_mode)
3067
            - (subreg_byte + GET_MODE_SIZE (outer_mode))) % UNITS_PER_WORD;
3068
  else
3069
    byte = subreg_byte % UNITS_PER_WORD;
3070
  bitpos += byte * BITS_PER_UNIT;
3071
 
3072
  return bitpos;
3073
}
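
/* Worked example, assuming a little-endian target (both byte and word
   order): (subreg:SI (reg:DI) 4) selects the high word of the DImode
   value, so subreg_lsb_1 reports that it starts at bit 32.  The checking
   helper below is purely illustrative.  */

static void
subreg_lsb_example_check (void)
{
  if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN)
    gcc_assert (subreg_lsb_1 (SImode, DImode, 4) == 32);
}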
3074
 
3075
/* Given a subreg X, return the bit offset where the subreg begins
3076
   (counting from the least significant bit of the reg).  */
3077
 
3078
unsigned int
3079
subreg_lsb (const_rtx x)
3080
{
3081
  return subreg_lsb_1 (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
3082
                       SUBREG_BYTE (x));
3083
}
3084
 
3085
/* Fill in information about a subreg of a hard register.
3086
   xregno - A regno of an inner hard subreg_reg (or what will become one).
3087
   xmode  - The mode of xregno.
3088
   offset - The byte offset.
3089
   ymode  - The mode of a top level SUBREG (or what may become one).
3090
   info   - Pointer to structure to fill in.  */
3091
void
3092
subreg_get_info (unsigned int xregno, enum machine_mode xmode,
3093
                 unsigned int offset, enum machine_mode ymode,
3094
                 struct subreg_info *info)
3095
{
3096
  int nregs_xmode, nregs_ymode;
3097
  int mode_multiple, nregs_multiple;
3098
  int offset_adj, y_offset, y_offset_adj;
3099
  int regsize_xmode, regsize_ymode;
3100
  bool rknown;
3101
 
3102
  gcc_assert (xregno < FIRST_PSEUDO_REGISTER);
3103
 
3104
  rknown = false;
3105
 
3106
  /* If there are holes in a non-scalar mode in registers, we expect
3107
     that it is made up of its units concatenated together.  */
3108
  if (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode))
3109
    {
3110
      enum machine_mode xmode_unit;
3111
 
3112
      nregs_xmode = HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode);
3113
      if (GET_MODE_INNER (xmode) == VOIDmode)
3114
        xmode_unit = xmode;
3115
      else
3116
        xmode_unit = GET_MODE_INNER (xmode);
3117
      gcc_assert (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode_unit));
3118
      gcc_assert (nregs_xmode
3119
                  == (GET_MODE_NUNITS (xmode)
3120
                      * HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode_unit)));
3121
      gcc_assert (hard_regno_nregs[xregno][xmode]
3122
                  == (hard_regno_nregs[xregno][xmode_unit]
3123
                      * GET_MODE_NUNITS (xmode)));
3124
 
3125
      /* You can only ask for a SUBREG of a value with holes in the middle
3126
         if you don't cross the holes.  (Such a SUBREG should be done by
3127
         picking a different register class, or doing it in memory if
3128
         necessary.)  An example of a value with holes is XCmode on 32-bit
3129
         x86 with -m128bit-long-double; it's represented in 6 32-bit registers,
3130
         3 for each part, but in memory it's two 128-bit parts.
3131
         Padding is assumed to be at the end (not necessarily the 'high part')
3132
         of each unit.  */
3133
      if ((offset / GET_MODE_SIZE (xmode_unit) + 1
3134
           < GET_MODE_NUNITS (xmode))
3135
          && (offset / GET_MODE_SIZE (xmode_unit)
3136
              != ((offset + GET_MODE_SIZE (ymode) - 1)
3137
                  / GET_MODE_SIZE (xmode_unit))))
3138
        {
3139
          info->representable_p = false;
3140
          rknown = true;
3141
        }
3142
    }
3143
  else
3144
    nregs_xmode = hard_regno_nregs[xregno][xmode];
3145
 
3146
  nregs_ymode = hard_regno_nregs[xregno][ymode];
3147
 
3148
  /* Paradoxical subregs are otherwise valid.  */
3149
  if (!rknown
3150
      && offset == 0
3151
      && GET_MODE_SIZE (ymode) > GET_MODE_SIZE (xmode))
3152
    {
3153
      info->representable_p = true;
3154
      /* If this is a big endian paradoxical subreg, which uses more
3155
         actual hard registers than the original register, we must
3156
         return a negative offset so that we find the proper highpart
3157
         of the register.  */
3158
      if (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
3159
          ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN)
3160
        info->offset = nregs_xmode - nregs_ymode;
3161
      else
3162
        info->offset = 0;
3163
      info->nregs = nregs_ymode;
3164
      return;
3165
    }
3166
 
3167
  /* If registers store different numbers of bits in the different
3168
     modes, we cannot generally form this subreg.  */
3169
  if (!HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode)
3170
      && !HARD_REGNO_NREGS_HAS_PADDING (xregno, ymode)
3171
      && (GET_MODE_SIZE (xmode) % nregs_xmode) == 0
3172
      && (GET_MODE_SIZE (ymode) % nregs_ymode) == 0)
3173
    {
3174
      regsize_xmode = GET_MODE_SIZE (xmode) / nregs_xmode;
3175
      regsize_ymode = GET_MODE_SIZE (ymode) / nregs_ymode;
3176
      if (!rknown && regsize_xmode > regsize_ymode && nregs_ymode > 1)
3177
        {
3178
          info->representable_p = false;
3179
          info->nregs
3180
            = (GET_MODE_SIZE (ymode) + regsize_xmode - 1) / regsize_xmode;
3181
          info->offset = offset / regsize_xmode;
3182
          return;
3183
        }
3184
      if (!rknown && regsize_ymode > regsize_xmode && nregs_xmode > 1)
3185
        {
3186
          info->representable_p = false;
3187
          info->nregs
3188
            = (GET_MODE_SIZE (ymode) + regsize_xmode - 1) / regsize_xmode;
3189
          info->offset = offset / regsize_xmode;
3190
          return;
3191
        }
3192
    }
3193
 
3194
  /* Lowpart subregs are otherwise valid.  */
3195
  if (!rknown && offset == subreg_lowpart_offset (ymode, xmode))
3196
    {
3197
      info->representable_p = true;
3198
      rknown = true;
3199
 
3200
      if (offset == 0 || nregs_xmode == nregs_ymode)
3201
        {
3202
          info->offset = 0;
3203
          info->nregs = nregs_ymode;
3204
          return;
3205
        }
3206
    }
3207
 
3208
  /* This should always pass, otherwise we don't know how to verify
3209
     the constraint.  These conditions may be relaxed but
3210
     subreg_regno_offset would need to be redesigned.  */
3211
  gcc_assert ((GET_MODE_SIZE (xmode) % GET_MODE_SIZE (ymode)) == 0);
3212
  gcc_assert ((nregs_xmode % nregs_ymode) == 0);
3213
 
3214
  /* The XMODE value can be seen as a vector of NREGS_XMODE
3215
     values.  The subreg must represent a lowpart of a given field.
3216
     Compute what field it is.  */
3217
  offset_adj = offset;
3218
  offset_adj -= subreg_lowpart_offset (ymode,
3219
                                       mode_for_size (GET_MODE_BITSIZE (xmode)
3220
                                                      / nregs_xmode,
3221
                                                      MODE_INT, 0));
3222
 
3223
  /* Size of ymode must not be greater than the size of xmode.  */
3224
  mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
3225
  gcc_assert (mode_multiple != 0);
3226
 
3227
  y_offset = offset / GET_MODE_SIZE (ymode);
3228
  y_offset_adj = offset_adj / GET_MODE_SIZE (ymode);
3229
  nregs_multiple = nregs_xmode / nregs_ymode;
3230
 
3231
  gcc_assert ((offset_adj % GET_MODE_SIZE (ymode)) == 0);
3232
  gcc_assert ((mode_multiple % nregs_multiple) == 0);
3233
 
3234
  if (!rknown)
3235
    {
3236
      info->representable_p = (!(y_offset_adj % (mode_multiple / nregs_multiple)));
3237
      rknown = true;
3238
    }
3239
  info->offset = (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
3240
  info->nregs = nregs_ymode;
3241
}
3242
 
3243
/* This function returns the regno offset of a subreg expression.
3244
   xregno - A regno of an inner hard subreg_reg (or what will become one).
3245
   xmode  - The mode of xregno.
3246
   offset - The byte offset.
3247
   ymode  - The mode of a top level SUBREG (or what may become one).
3248
   RETURN - The regno offset which would be used.  */
3249
unsigned int
3250
subreg_regno_offset (unsigned int xregno, enum machine_mode xmode,
3251
                     unsigned int offset, enum machine_mode ymode)
3252
{
3253
  struct subreg_info info;
3254
  subreg_get_info (xregno, xmode, offset, ymode, &info);
3255
  return info.offset;
3256
}
3257
 
3258
/* This function returns true when the offset is representable via
3259
   subreg_offset in the given regno.
3260
   xregno - A regno of an inner hard subreg_reg (or what will become one).
3261
   xmode  - The mode of xregno.
3262
   offset - The byte offset.
3263
   ymode  - The mode of a top level SUBREG (or what may become one).
3264
   RETURN - Whether the offset is representable.  */
3265
bool
3266
subreg_offset_representable_p (unsigned int xregno, enum machine_mode xmode,
3267
                               unsigned int offset, enum machine_mode ymode)
3268
{
3269
  struct subreg_info info;
3270
  subreg_get_info (xregno, xmode, offset, ymode, &info);
3271
  return info.representable_p;
3272
}
3273
 
3274
/* Return the number of a YMODE register to which
3275
 
3276
       (subreg:YMODE (reg:XMODE XREGNO) OFFSET)
3277
 
3278
   can be simplified.  Return -1 if the subreg can't be simplified.
3279
 
3280
   XREGNO is a hard register number.  */
3281
 
3282
int
3283
simplify_subreg_regno (unsigned int xregno, enum machine_mode xmode,
3284
                       unsigned int offset, enum machine_mode ymode)
3285
{
3286
  struct subreg_info info;
3287
  unsigned int yregno;
3288
 
3289
#ifdef CANNOT_CHANGE_MODE_CLASS
3290
  /* Give the backend a chance to disallow the mode change.  */
3291
  if (GET_MODE_CLASS (xmode) != MODE_COMPLEX_INT
3292
      && GET_MODE_CLASS (xmode) != MODE_COMPLEX_FLOAT
3293
      && REG_CANNOT_CHANGE_MODE_P (xregno, xmode, ymode))
3294
    return -1;
3295
#endif
3296
 
3297
  /* We shouldn't simplify stack-related registers.  */
3298
  if ((!reload_completed || frame_pointer_needed)
3299
      && (xregno == FRAME_POINTER_REGNUM
3300
          || xregno == HARD_FRAME_POINTER_REGNUM))
3301
    return -1;
3302
 
3303
  if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3304
      && xregno == ARG_POINTER_REGNUM)
3305
    return -1;
3306
 
3307
  if (xregno == STACK_POINTER_REGNUM)
3308
    return -1;
3309
 
3310
  /* Try to get the register offset.  */
3311
  subreg_get_info (xregno, xmode, offset, ymode, &info);
3312
  if (!info.representable_p)
3313
    return -1;
3314
 
3315
  /* Make sure that the offsetted register value is in range.  */
3316
  yregno = xregno + info.offset;
3317
  if (!HARD_REGISTER_NUM_P (yregno))
3318
    return -1;
3319
 
3320
  /* See whether (reg:YMODE YREGNO) is valid.
3321
 
3322
     ??? We allow invalid registers if (reg:XMODE XREGNO) is also invalid.
3323
     This is a kludge to work around how float/complex arguments are passed
3324
     on 32-bit SPARC and should be fixed.  */
3325
  if (!HARD_REGNO_MODE_OK (yregno, ymode)
3326
      && HARD_REGNO_MODE_OK (xregno, xmode))
3327
    return -1;
3328
 
3329
  return (int) yregno;
3330
}
3331
 
3332
/* Return the final regno that a subreg expression refers to.  */
3333
unsigned int
3334
subreg_regno (const_rtx x)
3335
{
3336
  unsigned int ret;
3337
  rtx subreg = SUBREG_REG (x);
3338
  int regno = REGNO (subreg);
3339
 
3340
  ret = regno + subreg_regno_offset (regno,
3341
                                     GET_MODE (subreg),
3342
                                     SUBREG_BYTE (x),
3343
                                     GET_MODE (x));
3344
  return ret;
3345
 
3346
}
3347
 
3348
/* Return the number of registers that a subreg expression refers
3349
   to.  */
3350
unsigned int
3351
subreg_nregs (const_rtx x)
3352
{
3353
  return subreg_nregs_with_regno (REGNO (SUBREG_REG (x)), x);
3354
}
3355
 
3356
/* Return the number of registers that a subreg expression with inner register
3357
   number REGNO refers to.  This is a copy of rtlanal.c:subreg_nregs,
3358
   changed so that the regno can be passed in.  */
3359
 
3360
unsigned int
3361
subreg_nregs_with_regno (unsigned int regno, const_rtx x)
3362
{
3363
  struct subreg_info info;
3364
  rtx subreg = SUBREG_REG (x);
3365
 
3366
  subreg_get_info (regno, GET_MODE (subreg), SUBREG_BYTE (x), GET_MODE (x),
3367
                   &info);
3368
  return info.nregs;
3369
}
3370
 
3371
 
3372
struct parms_set_data
3373
{
3374
  int nregs;
3375
  HARD_REG_SET regs;
3376
};
3377
 
3378
/* Helper function for noticing stores to parameter registers.  */
3379
static void
3380
parms_set (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
3381
{
3382
  struct parms_set_data *const d = (struct parms_set_data *) data;
3383
  if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3384
      && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
3385
    {
3386
      CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
3387
      d->nregs--;
3388
    }
3389
}
3390
 
3391
/* Look backward for the first parameter to be loaded.
3392
   Note that loads of all parameters will not necessarily be
3393
   found if CSE has eliminated some of them (e.g., an argument
3394
   to the outer function is passed down as a parameter).
3395
   Do not skip BOUNDARY.  */
3396
rtx
3397
find_first_parameter_load (rtx call_insn, rtx boundary)
3398
{
3399
  struct parms_set_data parm;
3400
  rtx p, before, first_set;
3401
 
3402
  /* Since different machines initialize their parameter registers
3403
     in different orders, assume nothing.  Collect the set of all
3404
     parameter registers.  */
3405
  CLEAR_HARD_REG_SET (parm.regs);
3406
  parm.nregs = 0;
3407
  for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
3408
    if (GET_CODE (XEXP (p, 0)) == USE
3409
        && REG_P (XEXP (XEXP (p, 0), 0)))
3410
      {
3411
        gcc_assert (REGNO (XEXP (XEXP (p, 0), 0)) < FIRST_PSEUDO_REGISTER);
3412
 
3413
        /* We only care about registers which can hold function
3414
           arguments.  */
3415
        if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
3416
          continue;
3417
 
3418
        SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
3419
        parm.nregs++;
3420
      }
3421
  before = call_insn;
3422
  first_set = call_insn;
3423
 
3424
  /* Search backward for the first set of a register in this set.  */
3425
  while (parm.nregs && before != boundary)
3426
    {
3427
      before = PREV_INSN (before);
3428
 
3429
      /* It is possible that some loads got CSEed from one call to
3430
         another.  Stop in that case.  */
3431
      if (CALL_P (before))
3432
        break;
3433
 
3434
      /* Our caller must either ensure that we will find all sets
3435
         (in case the code has not been optimized yet), or take care
3436
         of possible labels by setting BOUNDARY to the preceding
3437
         CODE_LABEL.  */
3438
      if (LABEL_P (before))
3439
        {
3440
          gcc_assert (before == boundary);
3441
          break;
3442
        }
3443
 
3444
      if (INSN_P (before))
3445
        {
3446
          int nregs_old = parm.nregs;
3447
          note_stores (PATTERN (before), parms_set, &parm);
3448
          /* If we found something that did not set a parameter reg,
3449
             we're done.  Do not keep going, as that might result
3450
             in hoisting an insn before the setting of a pseudo
3451
             that is used by the hoisted insn. */
3452
          if (nregs_old != parm.nregs)
3453
            first_set = before;
3454
          else
3455
            break;
3456
        }
3457
    }
3458
  return first_set;
3459
}
3460
 
3461
/* Return true if we should avoid inserting code between INSN and the preceding
3462
   call instruction.  */
3463
 
3464
bool
3465
keep_with_call_p (const_rtx insn)
3466
{
3467
  rtx set;
3468
 
3469
  if (INSN_P (insn) && (set = single_set (insn)) != NULL)
3470
    {
3471
      if (REG_P (SET_DEST (set))
3472
          && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
3473
          && fixed_regs[REGNO (SET_DEST (set))]
3474
          && general_operand (SET_SRC (set), VOIDmode))
3475
        return true;
3476
      if (REG_P (SET_SRC (set))
3477
          && FUNCTION_VALUE_REGNO_P (REGNO (SET_SRC (set)))
3478
          && REG_P (SET_DEST (set))
3479
          && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
3480
        return true;
3481
      /* There may be a stack pop just after the call and before the store
3482
         of the return register.  Search for the actual store when deciding
3483
         if we can break or not.  */
3484
      if (SET_DEST (set) == stack_pointer_rtx)
3485
        {
3486
          /* This CONST_CAST is okay because next_nonnote_insn just
3487
             returns its argument and we assign it to a const_rtx
3488
             variable.  */
3489
          const_rtx i2 = next_nonnote_insn (CONST_CAST_RTX(insn));
3490
          if (i2 && keep_with_call_p (i2))
3491
            return true;
3492
        }
3493
    }
3494
  return false;
3495
}
3496
 
3497
/* Return true if LABEL is a target of JUMP_INSN.  This applies only
3498
   to non-complex jumps.  That is, direct unconditional, conditional,
3499
   and tablejumps, but not computed jumps or returns.  It also does
3500
   not apply to the fallthru case of a conditional jump.  */
3501
 
3502
bool
3503
label_is_jump_target_p (const_rtx label, const_rtx jump_insn)
3504
{
3505
  rtx tmp = JUMP_LABEL (jump_insn);
3506
 
3507
  if (label == tmp)
3508
    return true;
3509
 
3510
  if (tablejump_p (jump_insn, NULL, &tmp))
3511
    {
3512
      rtvec vec = XVEC (PATTERN (tmp),
3513
                        GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC);
3514
      int i, veclen = GET_NUM_ELEM (vec);
3515
 
3516
      for (i = 0; i < veclen; ++i)
3517
        if (XEXP (RTVEC_ELT (vec, i), 0) == label)
3518
          return true;
3519
    }
3520
 
3521
  if (find_reg_note (jump_insn, REG_LABEL_TARGET, label))
3522
    return true;
3523
 
3524
  return false;
3525
}
3526
 
3527
 
3528
/* Return an estimate of the cost of computing rtx X.
3529
   One use is in cse, to decide which expression to keep in the hash table.
3530
   Another is in rtl generation, to pick the cheapest way to multiply.
3531
   Other uses like the latter are expected in the future.
3532
 
3533
   The SPEED parameter specifies whether costs optimized for speed or for
3534
   size should be returned.  */
3535
 
3536
int
3537
rtx_cost (rtx x, enum rtx_code outer_code ATTRIBUTE_UNUSED, bool speed)
3538
{
3539
  int i, j;
3540
  enum rtx_code code;
3541
  const char *fmt;
3542
  int total;
3543
 
3544
  if (x == 0)
3545
    return 0;
3546
 
3547
  /* Compute the default costs of certain things.
3548
     Note that targetm.rtx_costs can override the defaults.  */
3549
 
3550
  code = GET_CODE (x);
3551
  switch (code)
3552
    {
3553
    case MULT:
3554
      total = COSTS_N_INSNS (5);
3555
      break;
3556
    case DIV:
3557
    case UDIV:
3558
    case MOD:
3559
    case UMOD:
3560
      total = COSTS_N_INSNS (7);
3561
      break;
3562
    case USE:
3563
      /* Used in combine.c as a marker.  */
3564
      total = 0;
3565
      break;
3566
    default:
3567
      total = COSTS_N_INSNS (1);
3568
    }
3569
 
3570
  switch (code)
3571
    {
3572
    case REG:
3573
      return 0;
3574
 
3575
    case SUBREG:
3576
      total = 0;
3577
      /* If we can't tie these modes, make this expensive.  The larger
3578
         the mode, the more expensive it is.  */
3579
      if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
3580
        return COSTS_N_INSNS (2
3581
                              + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
3582
      break;
3583
 
3584
    default:
3585
      if (targetm.rtx_costs (x, code, outer_code, &total, speed))
3586
        return total;
3587
      break;
3588
    }
3589
 
3590
  /* Sum the costs of the sub-rtx's, plus cost of this operation,
3591
     which is already in total.  */
3592
 
3593
  fmt = GET_RTX_FORMAT (code);
3594
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3595
    if (fmt[i] == 'e')
3596
      total += rtx_cost (XEXP (x, i), code, speed);
3597
    else if (fmt[i] == 'E')
3598
      for (j = 0; j < XVECLEN (x, i); j++)
3599
        total += rtx_cost (XVECEXP (x, i, j), code, speed);
3600
 
3601
  return total;
3602
}
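/* A minimal illustrative use of rtx_cost (hypothetical caller, not part of
   the original file): deciding which of two equivalent expressions is
   cheaper when optimizing for speed:

       if (rtx_cost (new_rtx, SET, true) < rtx_cost (old_rtx, SET, true))
         ...prefer NEW_RTX...

   The OUTER_CODE argument (SET here) gives targetm.rtx_costs the context
   in which X will appear.  */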
3603
 
3604
/* Return the cost of address expression X.
3605
   X is expected to be a properly formed address reference.
3606
 
3607
   The SPEED parameter specifies whether costs optimized for speed or for
3608
   size should be returned.  */
3609
 
3610
int
3611
address_cost (rtx x, enum machine_mode mode, addr_space_t as, bool speed)
3612
{
3613
  /* We may be asked for the cost of various unusual addresses, such as the
3614
     operands of push instructions.  It is not worthwhile to complicate the
3615
     target hook to handle such cases.  */
3616
 
3617
  if (!memory_address_addr_space_p (mode, x, as))
3618
    return 1000;
3619
 
3620
  return targetm.address_cost (x, speed);
3621
}
3622
 
3623
/* If the target doesn't override, compute the cost as with arithmetic.  */
3624
 
3625
int
3626
default_address_cost (rtx x, bool speed)
3627
{
3628
  return rtx_cost (x, MEM, speed);
3629
}
3630
 
3631
 
3632
unsigned HOST_WIDE_INT
3633
nonzero_bits (const_rtx x, enum machine_mode mode)
3634
{
3635
  return cached_nonzero_bits (x, mode, NULL_RTX, VOIDmode, 0);
3636
}
3637
 
3638
unsigned int
3639
num_sign_bit_copies (const_rtx x, enum machine_mode mode)
3640
{
3641
  return cached_num_sign_bit_copies (x, mode, NULL_RTX, VOIDmode, 0);
3642
}
3643
 
3644
/* The function cached_nonzero_bits is a wrapper around nonzero_bits1.
3645
   It avoids exponential behavior in nonzero_bits1 when X has
3646
   identical subexpressions on the first or the second level.  */
3647
 
3648
static unsigned HOST_WIDE_INT
3649
cached_nonzero_bits (const_rtx x, enum machine_mode mode, const_rtx known_x,
3650
                     enum machine_mode known_mode,
3651
                     unsigned HOST_WIDE_INT known_ret)
3652
{
3653
  if (x == known_x && mode == known_mode)
3654
    return known_ret;
3655
 
3656
  /* Try to find identical subexpressions.  If found call
3657
     nonzero_bits1 on X with the subexpressions as KNOWN_X and the
3658
     precomputed value for the subexpression as KNOWN_RET.  */
3659
 
3660
  if (ARITHMETIC_P (x))
3661
    {
3662
      rtx x0 = XEXP (x, 0);
3663
      rtx x1 = XEXP (x, 1);
3664
 
3665
      /* Check the first level.  */
3666
      if (x0 == x1)
3667
        return nonzero_bits1 (x, mode, x0, mode,
3668
                              cached_nonzero_bits (x0, mode, known_x,
3669
                                                   known_mode, known_ret));
3670
 
3671
      /* Check the second level.  */
3672
      if (ARITHMETIC_P (x0)
3673
          && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
3674
        return nonzero_bits1 (x, mode, x1, mode,
3675
                              cached_nonzero_bits (x1, mode, known_x,
3676
                                                   known_mode, known_ret));
3677
 
3678
      if (ARITHMETIC_P (x1)
3679
          && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
3680
        return nonzero_bits1 (x, mode, x0, mode,
3681
                              cached_nonzero_bits (x0, mode, known_x,
3682
                                                   known_mode, known_ret));
3683
    }
3684
 
3685
  return nonzero_bits1 (x, mode, known_x, known_mode, known_ret);
3686
}
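/* Illustrative example (not part of the original sources): for an rtx such
   as (plus:SI (reg:SI 100) (reg:SI 100)) the two operands are the same
   object, so the first-level check above computes the nonzero bits of the
   shared operand once and hands the result down as KNOWN_X/KNOWN_RET,
   rather than recursing into the operand twice.  The register number is
   hypothetical.  */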
3687
 
3688
/* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
3689
   We don't let nonzero_bits recur into num_sign_bit_copies, because that
3690
   is less useful.  We can't allow both, because that results in exponential
3691
   run time recursion.  There is a nullstone testcase that triggered
3692
   this.  This macro avoids accidental uses of num_sign_bit_copies.  */
3693
#define cached_num_sign_bit_copies sorry_i_am_preventing_exponential_behavior
3694
 
3695
/* Given an expression, X, compute which bits in X can be nonzero.
3696
   We don't care about bits outside of those defined in MODE.
3697
 
3698
   For most X this is simply GET_MODE_MASK (MODE), but if X is
3699
   an arithmetic operation, we can do better.  */
3700
 
3701
static unsigned HOST_WIDE_INT
3702
nonzero_bits1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
3703
               enum machine_mode known_mode,
3704
               unsigned HOST_WIDE_INT known_ret)
3705
{
3706
  unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
3707
  unsigned HOST_WIDE_INT inner_nz;
3708
  enum rtx_code code;
3709
  unsigned int mode_width = GET_MODE_BITSIZE (mode);
3710
 
3711
  /* For floating-point and vector values, assume all bits are needed.  */
3712
  if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode)
3713
      || VECTOR_MODE_P (GET_MODE (x)) || VECTOR_MODE_P (mode))
3714
    return nonzero;
3715
 
3716
  /* If X is wider than MODE, use its mode instead.  */
3717
  if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
3718
    {
3719
      mode = GET_MODE (x);
3720
      nonzero = GET_MODE_MASK (mode);
3721
      mode_width = GET_MODE_BITSIZE (mode);
3722
    }
3723
 
3724
  if (mode_width > HOST_BITS_PER_WIDE_INT)
3725
    /* Our only callers in this case look for single bit values.  So
3726
       just return the mode mask.  Those tests will then be false.  */
3727
    return nonzero;
3728
 
3729
#ifndef WORD_REGISTER_OPERATIONS
3730
  /* If MODE is wider than X, but both are a single word for both the host
3731
     and target machines, we can compute this from which bits of the
3732
     object might be nonzero in its own mode, taking into account the fact
3733
     that on many CISC machines, accessing an object in a wider mode
3734
     causes the high-order bits to become undefined.  So they are
3735
     not known to be zero.  */
3736
 
3737
  if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
3738
      && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
3739
      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
3740
      && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
3741
    {
3742
      nonzero &= cached_nonzero_bits (x, GET_MODE (x),
3743
                                      known_x, known_mode, known_ret);
3744
      nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x));
3745
      return nonzero;
3746
    }
3747
#endif
3748
 
3749
  code = GET_CODE (x);
3750
  switch (code)
3751
    {
3752
    case REG:
3753
#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
3754
      /* If pointers extend unsigned and this is a pointer in Pmode, say that
3755
         all the bits above ptr_mode are known to be zero.  */
3756
      /* As we do not know which address space the pointer is referring to,
3757
         we can do this only if the target does not support different pointer
3758
         or address modes depending on the address space.  */
3759
      if (target_default_pointer_address_modes_p ()
3760
          && POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
3761
          && REG_POINTER (x))
3762
        nonzero &= GET_MODE_MASK (ptr_mode);
3763
#endif
3764
 
3765
      /* Include declared information about alignment of pointers.  */
3766
      /* ??? We don't properly preserve REG_POINTER changes across
3767
         pointer-to-integer casts, so we can't trust it except for
3768
         things that we know must be pointers.  See execute/960116-1.c.  */
3769
      if ((x == stack_pointer_rtx
3770
           || x == frame_pointer_rtx
3771
           || x == arg_pointer_rtx)
3772
          && REGNO_POINTER_ALIGN (REGNO (x)))
3773
        {
3774
          unsigned HOST_WIDE_INT alignment
3775
            = REGNO_POINTER_ALIGN (REGNO (x)) / BITS_PER_UNIT;
3776
 
3777
#ifdef PUSH_ROUNDING
3778
          /* If PUSH_ROUNDING is defined, it is possible for the
3779
             stack to be momentarily aligned only to that amount,
3780
             so we pick the least alignment.  */
3781
          if (x == stack_pointer_rtx && PUSH_ARGS)
3782
            alignment = MIN ((unsigned HOST_WIDE_INT) PUSH_ROUNDING (1),
3783
                             alignment);
3784
#endif
3785
 
3786
          nonzero &= ~(alignment - 1);
3787
        }
3788
 
3789
      {
3790
        unsigned HOST_WIDE_INT nonzero_for_hook = nonzero;
3791
        rtx new_rtx = rtl_hooks.reg_nonzero_bits (x, mode, known_x,
3792
                                              known_mode, known_ret,
3793
                                              &nonzero_for_hook);
3794
 
3795
        if (new_rtx)
3796
          nonzero_for_hook &= cached_nonzero_bits (new_rtx, mode, known_x,
3797
                                                   known_mode, known_ret);
3798
 
3799
        return nonzero_for_hook;
3800
      }
3801
 
3802
    case CONST_INT:
3803
#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
3804
      /* If X is negative in MODE, sign-extend the value.  */
3805
      if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
3806
          && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
3807
        return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
3808
#endif
3809
 
3810
      return INTVAL (x);
3811
 
3812
    case MEM:
3813
#ifdef LOAD_EXTEND_OP
3814
      /* In many, if not most, RISC machines, reading a byte from memory
3815
         zeros the rest of the register.  Noticing that fact saves a lot
3816
         of extra zero-extends.  */
3817
      if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
3818
        nonzero &= GET_MODE_MASK (GET_MODE (x));
3819
#endif
3820
      break;
3821
 
3822
    case EQ:  case NE:
3823
    case UNEQ:  case LTGT:
3824
    case GT:  case GTU:  case UNGT:
3825
    case LT:  case LTU:  case UNLT:
3826
    case GE:  case GEU:  case UNGE:
3827
    case LE:  case LEU:  case UNLE:
3828
    case UNORDERED: case ORDERED:
3829
      /* If this produces an integer result, we know which bits are set.
3830
         Code here used to clear bits outside the mode of X, but that is
3831
         now done above.  */
3832
      /* Mind that MODE is the mode the caller wants to look at this
3833
         operation in, and not the actual operation mode.  We can wind
3834
         up with (subreg:DI (gt:V4HI x y)), and we don't have anything
3835
         that describes the results of a vector compare.  */
3836
      if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
3837
          && mode_width <= HOST_BITS_PER_WIDE_INT)
3838
        nonzero = STORE_FLAG_VALUE;
3839
      break;
3840
 
3841
    case NEG:
3842
#if 0
3843
      /* Disabled to avoid exponential mutual recursion between nonzero_bits
3844
         and num_sign_bit_copies.  */
3845
      if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
3846
          == GET_MODE_BITSIZE (GET_MODE (x)))
3847
        nonzero = 1;
3848
#endif
3849
 
3850
      if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
3851
        nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x)));
3852
      break;
3853
 
3854
    case ABS:
3855
#if 0
3856
      /* Disabled to avoid exponential mutual recursion between nonzero_bits
3857
         and num_sign_bit_copies.  */
3858
      if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
3859
          == GET_MODE_BITSIZE (GET_MODE (x)))
3860
        nonzero = 1;
3861
#endif
3862
      break;
3863
 
3864
    case TRUNCATE:
3865
      nonzero &= (cached_nonzero_bits (XEXP (x, 0), mode,
3866
                                       known_x, known_mode, known_ret)
3867
                  & GET_MODE_MASK (mode));
3868
      break;
3869
 
3870
    case ZERO_EXTEND:
3871
      nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
3872
                                      known_x, known_mode, known_ret);
3873
      if (GET_MODE (XEXP (x, 0)) != VOIDmode)
3874
        nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
3875
      break;
3876
 
3877
    case SIGN_EXTEND:
3878
      /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
3879
         Otherwise, show that all the bits in the outer mode but not in the
3880
         inner mode may be nonzero.  */
3881
      inner_nz = cached_nonzero_bits (XEXP (x, 0), mode,
3882
                                      known_x, known_mode, known_ret);
3883
      if (GET_MODE (XEXP (x, 0)) != VOIDmode)
3884
        {
3885
          inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
3886
          if (inner_nz
3887
              & (((HOST_WIDE_INT) 1
3888
                  << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
3889
            inner_nz |= (GET_MODE_MASK (mode)
3890
                         & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
3891
        }
3892
 
3893
      nonzero &= inner_nz;
3894
      break;
3895
 
3896
    case AND:
3897
      nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
3898
                                       known_x, known_mode, known_ret)
3899
                 & cached_nonzero_bits (XEXP (x, 1), mode,
3900
                                        known_x, known_mode, known_ret);
3901
      break;
3902
 
3903
    case XOR:   case IOR:
3904
    case UMIN:  case UMAX:  case SMIN:  case SMAX:
3905
      {
3906
        unsigned HOST_WIDE_INT nonzero0 =
3907
          cached_nonzero_bits (XEXP (x, 0), mode,
3908
                               known_x, known_mode, known_ret);
3909
 
3910
        /* Don't call nonzero_bits for the second time if it cannot change
3911
           anything.  */
3912
        if ((nonzero & nonzero0) != nonzero)
3913
          nonzero &= nonzero0
3914
                     | cached_nonzero_bits (XEXP (x, 1), mode,
3915
                                            known_x, known_mode, known_ret);
3916
      }
3917
      break;
3918
 
3919
    case PLUS:  case MINUS:
3920
    case MULT:
3921
    case DIV:   case UDIV:
3922
    case MOD:   case UMOD:
3923
      /* We can apply the rules of arithmetic to compute the number of
3924
         high- and low-order zero bits of these operations.  We start by
3925
         computing the width (position of the highest-order nonzero bit)
3926
         and the number of low-order zero bits for each value.  */
3927
      {
3928
        unsigned HOST_WIDE_INT nz0 =
3929
          cached_nonzero_bits (XEXP (x, 0), mode,
3930
                               known_x, known_mode, known_ret);
3931
        unsigned HOST_WIDE_INT nz1 =
3932
          cached_nonzero_bits (XEXP (x, 1), mode,
3933
                               known_x, known_mode, known_ret);
3934
        int sign_index = GET_MODE_BITSIZE (GET_MODE (x)) - 1;
3935
        int width0 = floor_log2 (nz0) + 1;
3936
        int width1 = floor_log2 (nz1) + 1;
3937
        int low0 = floor_log2 (nz0 & -nz0);
3938
        int low1 = floor_log2 (nz1 & -nz1);
3939
        HOST_WIDE_INT op0_maybe_minusp
3940
          = (nz0 & ((HOST_WIDE_INT) 1 << sign_index));
3941
        HOST_WIDE_INT op1_maybe_minusp
3942
          = (nz1 & ((HOST_WIDE_INT) 1 << sign_index));
3943
        unsigned int result_width = mode_width;
3944
        int result_low = 0;
3945
 
3946
        switch (code)
3947
          {
3948
          case PLUS:
3949
            result_width = MAX (width0, width1) + 1;
3950
            result_low = MIN (low0, low1);
3951
            break;
3952
          case MINUS:
3953
            result_low = MIN (low0, low1);
3954
            break;
3955
          case MULT:
3956
            result_width = width0 + width1;
3957
            result_low = low0 + low1;
3958
            break;
3959
          case DIV:
3960
            if (width1 == 0)
3961
              break;
3962
            if (! op0_maybe_minusp && ! op1_maybe_minusp)
3963
              result_width = width0;
3964
            break;
3965
          case UDIV:
3966
            if (width1 == 0)
3967
              break;
3968
            result_width = width0;
3969
            break;
3970
          case MOD:
3971
            if (width1 == 0)
3972
              break;
3973
            if (! op0_maybe_minusp && ! op1_maybe_minusp)
3974
              result_width = MIN (width0, width1);
3975
            result_low = MIN (low0, low1);
3976
            break;
3977
          case UMOD:
3978
            if (width1 == 0)
3979
              break;
3980
            result_width = MIN (width0, width1);
3981
            result_low = MIN (low0, low1);
3982
            break;
3983
          default:
3984
            gcc_unreachable ();
3985
          }
3986
 
3987
        if (result_width < mode_width)
3988
          nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
3989
 
3990
        if (result_low > 0)
3991
          nonzero &= ~(((HOST_WIDE_INT) 1 << result_low) - 1);
3992
 
3993
#ifdef POINTERS_EXTEND_UNSIGNED
3994
        /* If pointers extend unsigned and this is an addition or subtraction
3995
           to a pointer in Pmode, all the bits above ptr_mode are known to be
3996
           zero.  */
3997
        /* As we do not know which address space the pointer is referring to,
3998
           we can do this only if the target does not support different pointer
3999
           or address modes depending on the address space.  */
4000
        if (target_default_pointer_address_modes_p ()
4001
            && POINTERS_EXTEND_UNSIGNED > 0 && GET_MODE (x) == Pmode
4002
            && (code == PLUS || code == MINUS)
4003
            && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
4004
          nonzero &= GET_MODE_MASK (ptr_mode);
4005
#endif
4006
      }
4007
      break;
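      /* Worked example for the arithmetic case above (illustrative, not in
         the original sources): for (plus:SI X Y) with nonzero_bits (X) ==
         0x0f0 and nonzero_bits (Y) == 0x00c we get width0 = 8, width1 = 4,
         low0 = 4 and low1 = 2, hence result_width = 9 and result_low = 2,
         so NONZERO, starting from the full SImode mask, is narrowed to
         0x1fc.  */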
4008
 
4009
    case ZERO_EXTRACT:
4010
      if (CONST_INT_P (XEXP (x, 1))
4011
          && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
4012
        nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
4013
      break;
4014
 
4015
    case SUBREG:
4016
      /* If this is a SUBREG formed for a promoted variable that has
4017
         been zero-extended, we know that at least the high-order bits
4018
         are zero, though others might be too.  */
4019
 
4020
      if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x) > 0)
4021
        nonzero = GET_MODE_MASK (GET_MODE (x))
4022
                  & cached_nonzero_bits (SUBREG_REG (x), GET_MODE (x),
4023
                                         known_x, known_mode, known_ret);
4024
 
4025
      /* If the inner mode is a single word for both the host and target
4026
         machines, we can compute this from which bits of the inner
4027
         object might be nonzero.  */
4028
      if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
4029
          && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
4030
              <= HOST_BITS_PER_WIDE_INT))
4031
        {
4032
          nonzero &= cached_nonzero_bits (SUBREG_REG (x), mode,
4033
                                          known_x, known_mode, known_ret);
4034
 
4035
#if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP)
4036
          /* If this is a typical RISC machine, we only have to worry
4037
             about the way loads are extended.  */
4038
          if ((LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
4039
               ? (((nonzero
4040
                    & (((unsigned HOST_WIDE_INT) 1
4041
                        << (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - 1))))
4042
                   != 0))
4043
               : LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) != ZERO_EXTEND)
4044
              || !MEM_P (SUBREG_REG (x)))
4045
#endif
4046
            {
4047
              /* On many CISC machines, accessing an object in a wider mode
4048
                 causes the high-order bits to become undefined.  So they are
4049
                 not known to be zero.  */
4050
              if (GET_MODE_SIZE (GET_MODE (x))
4051
                  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4052
                nonzero |= (GET_MODE_MASK (GET_MODE (x))
4053
                            & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
4054
            }
4055
        }
4056
      break;
4057
 
4058
    case ASHIFTRT:
4059
    case LSHIFTRT:
4060
    case ASHIFT:
4061
    case ROTATE:
4062
      /* The nonzero bits are in two classes: any bits within MODE
4063
         that aren't in GET_MODE (x) are always significant.  The rest of the
4064
         nonzero bits are those that are significant in the operand of
4065
         the shift when shifted the appropriate number of bits.  This
4066
         shows that high-order bits are cleared by the right shift and
4067
         low-order bits by left shifts.  */
4068
      if (CONST_INT_P (XEXP (x, 1))
4069
          && INTVAL (XEXP (x, 1)) >= 0
4070
          && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
4071
          && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (GET_MODE (x)))
4072
        {
4073
          enum machine_mode inner_mode = GET_MODE (x);
4074
          unsigned int width = GET_MODE_BITSIZE (inner_mode);
4075
          int count = INTVAL (XEXP (x, 1));
4076
          unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
4077
          unsigned HOST_WIDE_INT op_nonzero =
4078
            cached_nonzero_bits (XEXP (x, 0), mode,
4079
                                 known_x, known_mode, known_ret);
4080
          unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
4081
          unsigned HOST_WIDE_INT outer = 0;
4082
 
4083
          if (mode_width > width)
4084
            outer = (op_nonzero & nonzero & ~mode_mask);
4085
 
4086
          if (code == LSHIFTRT)
4087
            inner >>= count;
4088
          else if (code == ASHIFTRT)
4089
            {
4090
              inner >>= count;
4091
 
4092
              /* If the sign bit may have been nonzero before the shift, we
4093
                 need to mark all the places it could have been copied to
4094
                 by the shift as possibly nonzero.  */
4095
              if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
4096
                inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
4097
            }
4098
          else if (code == ASHIFT)
4099
            inner <<= count;
4100
          else
4101
            inner = ((inner << (count % width)
4102
                      | (inner >> (width - (count % width)))) & mode_mask);
4103
 
4104
          nonzero &= (outer | inner);
4105
        }
4106
      break;
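      /* Illustrative example for the shift case above (not in the original
         sources): for (lshiftrt:SI X (const_int 24)) with op_nonzero ==
         0xffffffff, INNER is shifted down to 0xff and OUTER stays 0, so at
         most the low eight bits of the result can be nonzero.  */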
4107
 
4108
    case FFS:
4109
    case POPCOUNT:
4110
      /* This is at most the number of bits in the mode.  */
4111
      nonzero = ((HOST_WIDE_INT) 2 << (floor_log2 (mode_width))) - 1;
4112
      break;
4113
 
4114
    case CLZ:
4115
      /* If CLZ has a known value at zero, then the nonzero bits are
4116
         that value, plus the number of bits in the mode minus one.  */
4117
      if (CLZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
4118
        nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
4119
      else
4120
        nonzero = -1;
4121
      break;
4122
 
4123
    case CTZ:
4124
      /* If CTZ has a known value at zero, then the nonzero bits are
4125
         that value, plus the number of bits in the mode minus one.  */
4126
      if (CTZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
4127
        nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
4128
      else
4129
        nonzero = -1;
4130
      break;
4131
 
4132
    case PARITY:
4133
      nonzero = 1;
4134
      break;
4135
 
4136
    case IF_THEN_ELSE:
4137
      {
4138
        unsigned HOST_WIDE_INT nonzero_true =
4139
          cached_nonzero_bits (XEXP (x, 1), mode,
4140
                               known_x, known_mode, known_ret);
4141
 
4142
        /* Don't call nonzero_bits for the second time if it cannot change
4143
           anything.  */
4144
        if ((nonzero & nonzero_true) != nonzero)
4145
          nonzero &= nonzero_true
4146
                     | cached_nonzero_bits (XEXP (x, 2), mode,
4147
                                            known_x, known_mode, known_ret);
4148
      }
4149
      break;
4150
 
4151
    default:
4152
      break;
4153
    }
4154
 
4155
  return nonzero;
4156
}
4157
 
4158
/* See the macro definition above.  */
4159
#undef cached_num_sign_bit_copies
4160
 
4161
 
4162
/* The function cached_num_sign_bit_copies is a wrapper around
4163
   num_sign_bit_copies1.  It avoids exponential behavior in
4164
   num_sign_bit_copies1 when X has identical subexpressions on the
4165
   first or the second level.  */
4166
 
4167
static unsigned int
4168
cached_num_sign_bit_copies (const_rtx x, enum machine_mode mode, const_rtx known_x,
4169
                            enum machine_mode known_mode,
4170
                            unsigned int known_ret)
4171
{
4172
  if (x == known_x && mode == known_mode)
4173
    return known_ret;
4174
 
4175
  /* Try to find identical subexpressions.  If found call
4176
     num_sign_bit_copies1 on X with the subexpressions as KNOWN_X and
4177
     the precomputed value for the subexpression as KNOWN_RET.  */
4178
 
4179
  if (ARITHMETIC_P (x))
4180
    {
4181
      rtx x0 = XEXP (x, 0);
4182
      rtx x1 = XEXP (x, 1);
4183
 
4184
      /* Check the first level.  */
4185
      if (x0 == x1)
4186
        return
4187
          num_sign_bit_copies1 (x, mode, x0, mode,
4188
                                cached_num_sign_bit_copies (x0, mode, known_x,
4189
                                                            known_mode,
4190
                                                            known_ret));
4191
 
4192
      /* Check the second level.  */
4193
      if (ARITHMETIC_P (x0)
4194
          && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
4195
        return
4196
          num_sign_bit_copies1 (x, mode, x1, mode,
4197
                                cached_num_sign_bit_copies (x1, mode, known_x,
4198
                                                            known_mode,
4199
                                                            known_ret));
4200
 
4201
      if (ARITHMETIC_P (x1)
4202
          && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
4203
        return
4204
          num_sign_bit_copies1 (x, mode, x0, mode,
4205
                                cached_num_sign_bit_copies (x0, mode, known_x,
4206
                                                            known_mode,
4207
                                                            known_ret));
4208
    }
4209
 
4210
  return num_sign_bit_copies1 (x, mode, known_x, known_mode, known_ret);
4211
}
4212
 
4213
/* Return the number of bits at the high-order end of X that are known to
4214
   be equal to the sign bit.  X will be used in mode MODE; if MODE is
4215
   VOIDmode, X will be used in its own mode.  The returned value will always
4216
   be between 1 and the number of bits in MODE.  */
4217
 
4218
static unsigned int
4219
num_sign_bit_copies1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
4220
                      enum machine_mode known_mode,
4221
                      unsigned int known_ret)
4222
{
4223
  enum rtx_code code = GET_CODE (x);
4224
  unsigned int bitwidth = GET_MODE_BITSIZE (mode);
4225
  int num0, num1, result;
4226
  unsigned HOST_WIDE_INT nonzero;
4227
 
4228
  /* If we weren't given a mode, use the mode of X.  If the mode is still
4229
     VOIDmode, we don't know anything.  Likewise if one of the modes is
4230
     floating-point.  */
4231
 
4232
  if (mode == VOIDmode)
4233
    mode = GET_MODE (x);
4234
 
4235
  if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x))
4236
      || VECTOR_MODE_P (GET_MODE (x)) || VECTOR_MODE_P (mode))
4237
    return 1;
4238
 
4239
  /* For a smaller object, just ignore the high bits.  */
4240
  if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
4241
    {
4242
      num0 = cached_num_sign_bit_copies (x, GET_MODE (x),
4243
                                         known_x, known_mode, known_ret);
4244
      return MAX (1,
4245
                  num0 - (int) (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth));
4246
    }
4247
 
4248
  if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
4249
    {
4250
#ifndef WORD_REGISTER_OPERATIONS
4251
  /* If this machine does not do all register operations on the entire
4252
     register and MODE is wider than the mode of X, we can say nothing
4253
     at all about the high-order bits.  */
4254
      return 1;
4255
#else
4256
      /* Likewise on machines that do, if the mode of the object is smaller
4257
         than a word and loads of that size don't sign extend, we can say
4258
         nothing about the high order bits.  */
4259
      if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
4260
#ifdef LOAD_EXTEND_OP
4261
          && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
4262
#endif
4263
          )
4264
        return 1;
4265
#endif
4266
    }
4267
 
4268
  switch (code)
4269
    {
4270
    case REG:
4271
 
4272
#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
4273
      /* If pointers extend signed and this is a pointer in Pmode, say that
4274
         all the bits above ptr_mode are known to be sign bit copies.  */
4275
      /* As we do not know which address space the pointer is referring to,
4276
         we can do this only if the target does not support different pointer
4277
         or address modes depending on the address space.  */
4278
      if (target_default_pointer_address_modes_p ()
4279
          && ! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
4280
          && mode == Pmode && REG_POINTER (x))
4281
        return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1;
4282
#endif
4283
 
4284
      {
4285
        unsigned int copies_for_hook = 1, copies = 1;
4286
        rtx new_rtx = rtl_hooks.reg_num_sign_bit_copies (x, mode, known_x,
4287
                                                     known_mode, known_ret,
4288
                                                     &copies_for_hook);
4289
 
4290
        if (new_rtx)
4291
          copies = cached_num_sign_bit_copies (new_rtx, mode, known_x,
4292
                                               known_mode, known_ret);
4293
 
4294
        if (copies > 1 || copies_for_hook > 1)
4295
          return MAX (copies, copies_for_hook);
4296
 
4297
        /* Else, use nonzero_bits to guess num_sign_bit_copies (see below).  */
4298
      }
4299
      break;
4300
 
4301
    case MEM:
4302
#ifdef LOAD_EXTEND_OP
4303
      /* Some RISC machines sign-extend all loads smaller than a word.  */
4304
      if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
4305
        return MAX (1, ((int) bitwidth
4306
                        - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1));
4307
#endif
4308
      break;
4309
 
4310
    case CONST_INT:
4311
      /* If the constant is negative, take its 1's complement and remask.
4312
         Then see how many zero bits we have.  */
4313
      nonzero = INTVAL (x) & GET_MODE_MASK (mode);
4314
      if (bitwidth <= HOST_BITS_PER_WIDE_INT
4315
          && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4316
        nonzero = (~nonzero) & GET_MODE_MASK (mode);
4317
 
4318
      return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
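      /* Illustrative example (not in the original sources): in SImode,
         (const_int -4) is 0xfffffffc; its complement under the mode mask is
         0x3, so the formula above yields 32 - 1 - 1 = 30 sign-bit copies
         (bits 31 down to 2 all equal the sign bit).  */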
4319
 
4320
    case SUBREG:
4321
      /* If this is a SUBREG for a promoted object that is sign-extended
4322
         and we are looking at it in a wider mode, at least the
4323
         high-order bits are known to be sign bit copies.  */
4324
 
4325
      if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
4326
        {
4327
          num0 = cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4328
                                             known_x, known_mode, known_ret);
4329
          return MAX ((int) bitwidth
4330
                      - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1,
4331
                      num0);
4332
        }
4333
 
4334
      /* For a smaller object, just ignore the high bits.  */
4335
      if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
4336
        {
4337
          num0 = cached_num_sign_bit_copies (SUBREG_REG (x), VOIDmode,
4338
                                             known_x, known_mode, known_ret);
4339
          return MAX (1, (num0
4340
                          - (int) (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
4341
                                   - bitwidth)));
4342
        }
4343
 
4344
#ifdef WORD_REGISTER_OPERATIONS
4345
#ifdef LOAD_EXTEND_OP
4346
      /* For paradoxical SUBREGs on machines where all register operations
4347
         affect the entire register, just look inside.  Note that we are
4348
         passing MODE to the recursive call, so the number of sign bit copies
4349
         will remain relative to that mode, not the inner mode.  */
4350
 
4351
      /* This works only if loads sign extend.  Otherwise, if we get a
4352
         reload for the inner part, it may be loaded from the stack, and
4353
         then we lose all sign bit copies that existed before the store
4354
         to the stack.  */
4355
 
4356
      if ((GET_MODE_SIZE (GET_MODE (x))
4357
           > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4358
          && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
4359
          && MEM_P (SUBREG_REG (x)))
4360
        return cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4361
                                           known_x, known_mode, known_ret);
4362
#endif
4363
#endif
4364
      break;
4365
 
4366
    case SIGN_EXTRACT:
4367
      if (CONST_INT_P (XEXP (x, 1)))
4368
        return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1)));
4369
      break;
4370
 
4371
    case SIGN_EXTEND:
4372
      return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4373
              + cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4374
                                            known_x, known_mode, known_ret));
4375
 
4376
    case TRUNCATE:
4377
      /* For a smaller object, just ignore the high bits.  */
4378
      num0 = cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4379
                                         known_x, known_mode, known_ret);
4380
      return MAX (1, (num0 - (int) (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4381
                                    - bitwidth)));
4382
 
4383
    case NOT:
4384
      return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4385
                                         known_x, known_mode, known_ret);
4386
 
4387
    case ROTATE:       case ROTATERT:
4388
      /* If we are rotating left by a number of bits less than the number
4389
         of sign bit copies, we can just subtract that amount from the
4390
         number.  */
4391
      if (CONST_INT_P (XEXP (x, 1))
4392
          && INTVAL (XEXP (x, 1)) >= 0
4393
          && INTVAL (XEXP (x, 1)) < (int) bitwidth)
4394
        {
4395
          num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4396
                                             known_x, known_mode, known_ret);
4397
          return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
4398
                                 : (int) bitwidth - INTVAL (XEXP (x, 1))));
4399
        }
4400
      break;
4401
 
4402
    case NEG:
4403
      /* In general, this subtracts one sign bit copy.  But if the value
4404
         is known to be positive, the number of sign bit copies is the
4405
         same as that of the input.  Finally, if the input has just one bit
4406
         that might be nonzero, all the bits are copies of the sign bit.  */
4407
      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4408
                                         known_x, known_mode, known_ret);
4409
      if (bitwidth > HOST_BITS_PER_WIDE_INT)
4410
        return num0 > 1 ? num0 - 1 : 1;
4411
 
4412
      nonzero = nonzero_bits (XEXP (x, 0), mode);
4413
      if (nonzero == 1)
4414
        return bitwidth;
4415
 
4416
      if (num0 > 1
4417
          && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
4418
        num0--;
4419
 
4420
      return num0;
4421
 
4422
    case IOR:   case AND:   case XOR:
4423
    case SMIN:  case SMAX:  case UMIN:  case UMAX:
4424
      /* Logical operations will preserve the number of sign-bit copies.
4425
         MIN and MAX operations always return one of the operands.  */
4426
      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4427
                                         known_x, known_mode, known_ret);
4428
      num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4429
                                         known_x, known_mode, known_ret);
4430
 
4431
      /* If num1 is clearing some of the top bits then regardless of
4432
         the other term, we are guaranteed to have at least that many
4433
         high-order zero bits.  */
4434
      if (code == AND
4435
          && num1 > 1
4436
          && bitwidth <= HOST_BITS_PER_WIDE_INT
4437
          && CONST_INT_P (XEXP (x, 1))
4438
          && !(INTVAL (XEXP (x, 1)) & ((HOST_WIDE_INT) 1 << (bitwidth - 1))))
4439
        return num1;
4440
 
4441
      /* Similarly for IOR when setting high-order bits.  */
4442
      if (code == IOR
4443
          && num1 > 1
4444
          && bitwidth <= HOST_BITS_PER_WIDE_INT
4445
          && CONST_INT_P (XEXP (x, 1))
4446
          && (INTVAL (XEXP (x, 1)) & ((HOST_WIDE_INT) 1 << (bitwidth - 1))))
4447
        return num1;
4448
 
4449
      return MIN (num0, num1);
4450
 
4451
    case PLUS:  case MINUS:
4452
      /* For addition and subtraction, we can have a 1-bit carry.  However,
4453
         if we are subtracting 1 from a positive number, there will not
4454
         be such a carry.  Furthermore, if the positive number is known to
4455
         be 0 or 1, we know the result is either -1 or 0.  */
4456
 
4457
      if (code == PLUS && XEXP (x, 1) == constm1_rtx
4458
          && bitwidth <= HOST_BITS_PER_WIDE_INT)
4459
        {
4460
          nonzero = nonzero_bits (XEXP (x, 0), mode);
4461
          if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
4462
            return (nonzero == 1 || nonzero == 0 ? bitwidth
4463
                    : bitwidth - floor_log2 (nonzero) - 1);
4464
        }
4465
 
4466
      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4467
                                         known_x, known_mode, known_ret);
4468
      num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4469
                                         known_x, known_mode, known_ret);
4470
      result = MAX (1, MIN (num0, num1) - 1);
4471
 
4472
#ifdef POINTERS_EXTEND_UNSIGNED
4473
      /* If pointers extend signed and this is an addition or subtraction
4474
         to a pointer in Pmode, all the bits above ptr_mode are known to be
4475
         sign bit copies.  */
4476
      /* As we do not know which address space the pointer is referring to,
4477
         we can do this only if the target does not support different pointer
4478
         or address modes depending on the address space.  */
4479
      if (target_default_pointer_address_modes_p ()
4480
          && ! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
4481
          && (code == PLUS || code == MINUS)
4482
          && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
4483
        result = MAX ((int) (GET_MODE_BITSIZE (Pmode)
4484
                             - GET_MODE_BITSIZE (ptr_mode) + 1),
4485
                      result);
4486
#endif
4487
      return result;
4488
 
4489
    case MULT:
4490
      /* The number of bits of the product is the sum of the number of
4491
         bits of both terms.  However, unless one of the terms is known
4492
         to be positive, we must allow for an additional bit since negating
4493
         a negative number can remove one sign bit copy.  */
4494
 
4495
      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4496
                                         known_x, known_mode, known_ret);
4497
      num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4498
                                         known_x, known_mode, known_ret);
4499
 
4500
      result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
4501
      if (result > 0
4502
          && (bitwidth > HOST_BITS_PER_WIDE_INT
4503
              || (((nonzero_bits (XEXP (x, 0), mode)
4504
                    & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4505
                  && ((nonzero_bits (XEXP (x, 1), mode)
4506
                       & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))))
4507
        result--;
4508
 
4509
      return MAX (1, result);
4510
 
4511
    case UDIV:
4512
      /* The result must be <= the first operand.  If the first operand
4513
         has the high bit set, we know nothing about the number of sign
4514
         bit copies.  */
4515
      if (bitwidth > HOST_BITS_PER_WIDE_INT)
4516
        return 1;
4517
      else if ((nonzero_bits (XEXP (x, 0), mode)
4518
                & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4519
        return 1;
4520
      else
4521
        return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4522
                                           known_x, known_mode, known_ret);
4523
 
4524
    case UMOD:
4525
      /* The result must be <= the second operand.  If the second operand
4526
         has (or just might have) the high bit set, we know nothing about
4527
         the number of sign bit copies.  */
4528
      if (bitwidth > HOST_BITS_PER_WIDE_INT)
4529
        return 1;
4530
      else if ((nonzero_bits (XEXP (x, 1), mode)
4531
                & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4532
        return 1;
4533
      else
4534
        return cached_num_sign_bit_copies (XEXP (x, 1), mode,
4535
                                           known_x, known_mode, known_ret);
4536
 
4537
    case DIV:
4538
      /* Similar to unsigned division, except that we have to worry about
4539
         the case where the divisor is negative, in which case we have
4540
         to add 1.  */
4541
      result = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4542
                                           known_x, known_mode, known_ret);
4543
      if (result > 1
4544
          && (bitwidth > HOST_BITS_PER_WIDE_INT
4545
              || (nonzero_bits (XEXP (x, 1), mode)
4546
                  & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
4547
        result--;
4548
 
4549
      return result;
4550
 
4551
    case MOD:
4552
      result = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4553
                                           known_x, known_mode, known_ret);
4554
      if (result > 1
4555
          && (bitwidth > HOST_BITS_PER_WIDE_INT
4556
              || (nonzero_bits (XEXP (x, 1), mode)
4557
                  & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
4558
        result--;
4559
 
4560
      return result;
4561
 
4562
    case ASHIFTRT:
4563
      /* Shifts by a constant add to the number of bits equal to the
4564
         sign bit.  */
4565
      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4566
                                         known_x, known_mode, known_ret);
4567
      if (CONST_INT_P (XEXP (x, 1))
4568
          && INTVAL (XEXP (x, 1)) > 0
4569
          && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (GET_MODE (x)))
4570
        num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1)));
4571
 
4572
      return num0;
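      /* Illustrative example for the ASHIFTRT case above (not in the
         original sources): for (ashiftrt:SI X (const_int 24)), even if X is
         only known to have a single sign-bit copy, the shift replicates the
         sign bit into the 24 vacated positions, so the result is
         MIN (32, 1 + 24) = 25 copies.  */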
4573
 
4574
    case ASHIFT:
4575
      /* Left shifts destroy copies.  */
4576
      if (!CONST_INT_P (XEXP (x, 1))
4577
          || INTVAL (XEXP (x, 1)) < 0
4578
          || INTVAL (XEXP (x, 1)) >= (int) bitwidth
4579
          || INTVAL (XEXP (x, 1)) >= GET_MODE_BITSIZE (GET_MODE (x)))
4580
        return 1;
4581
 
4582
      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4583
                                         known_x, known_mode, known_ret);
4584
      return MAX (1, num0 - INTVAL (XEXP (x, 1)));
4585
 
4586
    case IF_THEN_ELSE:
4587
      num0 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4588
                                         known_x, known_mode, known_ret);
4589
      num1 = cached_num_sign_bit_copies (XEXP (x, 2), mode,
4590
                                         known_x, known_mode, known_ret);
4591
      return MIN (num0, num1);
4592
 
4593
    case EQ:  case NE:  case GE:  case GT:  case LE:  case LT:
4594
    case UNEQ:  case LTGT:  case UNGE:  case UNGT:  case UNLE:  case UNLT:
4595
    case GEU: case GTU: case LEU: case LTU:
4596
    case UNORDERED: case ORDERED:
4597
      /* If the constant is negative, take its 1's complement and remask.
4598
         Then see how many zero bits we have.  */
4599
      nonzero = STORE_FLAG_VALUE;
4600
      if (bitwidth <= HOST_BITS_PER_WIDE_INT
4601
          && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4602
        nonzero = (~nonzero) & GET_MODE_MASK (mode);
4603
 
4604
      return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
4605
 
4606
    default:
4607
      break;
4608
    }
4609
 
4610
  /* If we haven't been able to figure it out by one of the above rules,
4611
     see if some of the high-order bits are known to be zero.  If so,
4612
     count those bits and return one less than that amount.  If we can't
4613
     safely compute the mask for this mode, give up and return 1.  */
4614
 
4615
  bitwidth = GET_MODE_BITSIZE (mode);
4616
  if (bitwidth > HOST_BITS_PER_WIDE_INT)
4617
    return 1;
4618
 
4619
  nonzero = nonzero_bits (x, mode);
4620
  return nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
4621
         ? 1 : bitwidth - floor_log2 (nonzero) - 1;
4622
}
4623
 
4624
/* Calculate the rtx_cost of a single instruction.  A return value of
4625
   zero indicates an instruction pattern without a known cost.  */
4626
 
4627
int
4628
insn_rtx_cost (rtx pat, bool speed)
4629
{
4630
  int i, cost;
4631
  rtx set;
4632
 
4633
  /* Extract the single set rtx from the instruction pattern.
4634
     We can't use single_set since we only have the pattern.  */
4635
  if (GET_CODE (pat) == SET)
4636
    set = pat;
4637
  else if (GET_CODE (pat) == PARALLEL)
4638
    {
4639
      set = NULL_RTX;
4640
      for (i = 0; i < XVECLEN (pat, 0); i++)
4641
        {
4642
          rtx x = XVECEXP (pat, 0, i);
4643
          if (GET_CODE (x) == SET)
4644
            {
4645
              if (set)
4646
                return 0;
4647
              set = x;
4648
            }
4649
        }
4650
      if (!set)
4651
        return 0;
4652
    }
4653
  else
4654
    return 0;
4655
 
4656
  cost = rtx_cost (SET_SRC (set), SET, speed);
4657
  return cost > 0 ? cost : COSTS_N_INSNS (1);
4658
}
4659
 
4660
/* Given an insn INSN and condition COND, return the condition in a
4661
   canonical form to simplify testing by callers.  Specifically:
4662
 
4663
   (1) The code will always be a comparison operation (EQ, NE, GT, etc.).
4664
   (2) Both operands will be machine operands; (cc0) will have been replaced.
4665
   (3) If an operand is a constant, it will be the second operand.
4666
   (4) (LE x const) will be replaced with (LT x <const+1>) and similarly
4667
       for GE, GEU, and LEU.
4668
 
4669
   If the condition cannot be understood, or is an inequality floating-point
4670
   comparison which needs to be reversed, 0 will be returned.
4671
 
4672
   If REVERSE is nonzero, then reverse the condition prior to canonicalizing it.
4673
 
4674
   If EARLIEST is nonzero, it is a pointer to a place where the earliest
4675
   insn used in locating the condition was found.  If a replacement test
4676
   of the condition is desired, it should be placed in front of that
4677
   insn and we will be sure that the inputs are still valid.
4678
 
4679
   If WANT_REG is nonzero, we wish the condition to be relative to that
4680
   register, if possible.  Therefore, do not canonicalize the condition
4681
   further.  If ALLOW_CC_MODE is nonzero, allow the condition returned
4682
   to be a compare to a CC mode register.
4683
 
4684
   If VALID_AT_INSN_P, the condition must be valid at both *EARLIEST
4685
   and at INSN.  */
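/* Illustrative example (not part of the original sources): with REVERSE == 0,
   a condition such as (le:SI (reg:SI 100) (const_int 5)) is rewritten by
   rule (4) above into (lt:SI (reg:SI 100) (const_int 6)).  The register
   number is hypothetical.  */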
4686
 
4687
rtx
4688
canonicalize_condition (rtx insn, rtx cond, int reverse, rtx *earliest,
4689
                        rtx want_reg, int allow_cc_mode, int valid_at_insn_p)
4690
{
4691
  enum rtx_code code;
4692
  rtx prev = insn;
4693
  const_rtx set;
4694
  rtx tem;
4695
  rtx op0, op1;
4696
  int reverse_code = 0;
4697
  enum machine_mode mode;
4698
  basic_block bb = BLOCK_FOR_INSN (insn);
4699
 
4700
  code = GET_CODE (cond);
4701
  mode = GET_MODE (cond);
4702
  op0 = XEXP (cond, 0);
4703
  op1 = XEXP (cond, 1);
4704
 
4705
  if (reverse)
4706
    code = reversed_comparison_code (cond, insn);
4707
  if (code == UNKNOWN)
4708
    return 0;
4709
 
4710
  if (earliest)
4711
    *earliest = insn;
4712
 
4713
  /* If we are comparing a register with zero, see if the register is set
4714
     in the previous insn to a COMPARE or a comparison operation.  Perform
4715
     the same tests as a function of STORE_FLAG_VALUE as find_comparison_args
4716
     in cse.c  */
4717
 
4718
  while ((GET_RTX_CLASS (code) == RTX_COMPARE
4719
          || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
4720
         && op1 == CONST0_RTX (GET_MODE (op0))
4721
         && op0 != want_reg)
4722
    {
4723
      /* Set nonzero when we find something of interest.  */
4724
      rtx x = 0;
4725
 
4726
#ifdef HAVE_cc0
4727
      /* If comparison with cc0, import actual comparison from compare
4728
         insn.  */
4729
      if (op0 == cc0_rtx)
4730
        {
4731
          if ((prev = prev_nonnote_insn (prev)) == 0
4732
              || !NONJUMP_INSN_P (prev)
4733
              || (set = single_set (prev)) == 0
4734
              || SET_DEST (set) != cc0_rtx)
4735
            return 0;
4736
 
4737
          op0 = SET_SRC (set);
4738
          op1 = CONST0_RTX (GET_MODE (op0));
4739
          if (earliest)
4740
            *earliest = prev;
4741
        }
4742
#endif
4743
 
4744
      /* If this is a COMPARE, pick up the two things being compared.  */
4745
      if (GET_CODE (op0) == COMPARE)
4746
        {
4747
          op1 = XEXP (op0, 1);
4748
          op0 = XEXP (op0, 0);
4749
          continue;
4750
        }
4751
      else if (!REG_P (op0))
4752
        break;
4753
 
4754
      /* Go back to the previous insn.  Stop if it is not an INSN.  We also
4755
         stop if it isn't a single set or if it has a REG_INC note because
4756
         we don't want to bother dealing with it.  */
4757
 
4758
      do
4759
        prev = prev_nonnote_insn (prev);
      while (prev && DEBUG_INSN_P (prev));

      if (prev == 0
          || !NONJUMP_INSN_P (prev)
          || FIND_REG_INC_NOTE (prev, NULL_RTX)
          /* In cfglayout mode, there do not have to be labels at the
             beginning of a block, or jumps at the end, so the previous
             conditions would not stop us when we reach bb boundary.  */
          || BLOCK_FOR_INSN (prev) != bb)
        break;

      set = set_of (op0, prev);

      if (set
          && (GET_CODE (set) != SET
              || !rtx_equal_p (SET_DEST (set), op0)))
        break;

      /* If this is setting OP0, get what it sets it to if it looks
         relevant.  */
      if (set)
        {
          enum machine_mode inner_mode = GET_MODE (SET_DEST (set));
#ifdef FLOAT_STORE_FLAG_VALUE
          REAL_VALUE_TYPE fsfv;
#endif

          /* ??? We may not combine comparisons done in a CCmode with
             comparisons not done in a CCmode.  This is to aid targets
             like Alpha that have an IEEE compliant EQ instruction, and
             a non-IEEE compliant BEQ instruction.  The use of CCmode is
             actually artificial, simply to prevent the combination, but
             should not affect other platforms.

             However, we must allow VOIDmode comparisons to match either
             CCmode or non-CCmode comparison, because some ports have
             modeless comparisons inside branch patterns.

             ??? This mode check should perhaps look more like the mode check
             in simplify_comparison in combine.  */

          if ((GET_CODE (SET_SRC (set)) == COMPARE
               || (((code == NE
                     || (code == LT
                         && GET_MODE_CLASS (inner_mode) == MODE_INT
                         && (GET_MODE_BITSIZE (inner_mode)
                             <= HOST_BITS_PER_WIDE_INT)
                         && (STORE_FLAG_VALUE
                             & ((HOST_WIDE_INT) 1
                                << (GET_MODE_BITSIZE (inner_mode) - 1))))
#ifdef FLOAT_STORE_FLAG_VALUE
                     || (code == LT
                         && SCALAR_FLOAT_MODE_P (inner_mode)
                         && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
                             REAL_VALUE_NEGATIVE (fsfv)))
#endif
                     ))
                   && COMPARISON_P (SET_SRC (set))))
              && (((GET_MODE_CLASS (mode) == MODE_CC)
                   == (GET_MODE_CLASS (inner_mode) == MODE_CC))
                  || mode == VOIDmode || inner_mode == VOIDmode))
            x = SET_SRC (set);
          else if (((code == EQ
                     || (code == GE
                         && (GET_MODE_BITSIZE (inner_mode)
                             <= HOST_BITS_PER_WIDE_INT)
                         && GET_MODE_CLASS (inner_mode) == MODE_INT
                         && (STORE_FLAG_VALUE
                             & ((HOST_WIDE_INT) 1
                                << (GET_MODE_BITSIZE (inner_mode) - 1))))
#ifdef FLOAT_STORE_FLAG_VALUE
                     || (code == GE
                         && SCALAR_FLOAT_MODE_P (inner_mode)
                         && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
                             REAL_VALUE_NEGATIVE (fsfv)))
#endif
                     ))
                   && COMPARISON_P (SET_SRC (set))
                   && (((GET_MODE_CLASS (mode) == MODE_CC)
                        == (GET_MODE_CLASS (inner_mode) == MODE_CC))
                       || mode == VOIDmode || inner_mode == VOIDmode))

            {
              reverse_code = 1;
              x = SET_SRC (set);
            }
          else
            break;
        }

      else if (reg_set_p (op0, prev))
        /* If this sets OP0, but not directly, we have to give up.  */
        break;

      if (x)
        {
          /* If the caller is expecting the condition to be valid at INSN,
             make sure X doesn't change before INSN.  */
          if (valid_at_insn_p)
            if (modified_in_p (x, prev) || modified_between_p (x, prev, insn))
              break;
          if (COMPARISON_P (x))
            code = GET_CODE (x);
          if (reverse_code)
            {
              code = reversed_comparison_code (x, prev);
              if (code == UNKNOWN)
                return 0;
              reverse_code = 0;
            }

          op0 = XEXP (x, 0), op1 = XEXP (x, 1);
          if (earliest)
            *earliest = prev;
        }
    }

  /* If constant is first, put it last.  */
  if (CONSTANT_P (op0))
    code = swap_condition (code), tem = op0, op0 = op1, op1 = tem;

  /* If OP0 is the result of a comparison, we weren't able to find what
     was really being compared, so fail.  */
  if (!allow_cc_mode
      && GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC)
    return 0;

  /* Canonicalize any ordered comparison with integers involving equality
     if we can do computations in the relevant mode and we do not
     overflow.  */

  if (GET_MODE_CLASS (GET_MODE (op0)) != MODE_CC
      && CONST_INT_P (op1)
      && GET_MODE (op0) != VOIDmode
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      HOST_WIDE_INT const_val = INTVAL (op1);
      unsigned HOST_WIDE_INT uconst_val = const_val;
      unsigned HOST_WIDE_INT max_val
        = (unsigned HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (op0));

      switch (code)
        {
        case LE:
          if ((unsigned HOST_WIDE_INT) const_val != max_val >> 1)
            code = LT, op1 = gen_int_mode (const_val + 1, GET_MODE (op0));
          break;

        /* When cross-compiling, const_val might be sign-extended from
           BITS_PER_WORD to HOST_BITS_PER_WIDE_INT */
        case GE:
          if ((HOST_WIDE_INT) (const_val & max_val)
              != (((HOST_WIDE_INT) 1
                   << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
            code = GT, op1 = gen_int_mode (const_val - 1, GET_MODE (op0));
          break;

        case LEU:
          if (uconst_val < max_val)
            code = LTU, op1 = gen_int_mode (uconst_val + 1, GET_MODE (op0));
          break;

        case GEU:
          if (uconst_val != 0)
            code = GTU, op1 = gen_int_mode (uconst_val - 1, GET_MODE (op0));
          break;

        default:
          break;
        }
    }
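  /* For example, with a 32-bit SImode OP0 the switch above turns
     (le OP0 4) into (lt OP0 5) and (geu OP0 1) into (gtu OP0 0),
     preferring the strict forms whenever the constant allows it.  */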

  /* Never return CC0; return zero instead.  */
  if (CC0_P (op0))
    return 0;

  return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
}

/* Given a jump insn JUMP, return the condition that will cause it to branch
   to its JUMP_LABEL.  If the condition cannot be understood, or is an
   inequality floating-point comparison which needs to be reversed, 0 will
   be returned.

   If EARLIEST is nonzero, it is a pointer to a place where the earliest
   insn used in locating the condition was found.  If a replacement test
   of the condition is desired, it should be placed in front of that
   insn and we will be sure that the inputs are still valid.  If EARLIEST
   is null, the returned condition will be valid at INSN.

   If ALLOW_CC_MODE is nonzero, allow the condition returned to be a
   compare CC mode register.

   VALID_AT_INSN_P is the same as for canonicalize_condition.  */

rtx
get_condition (rtx jump, rtx *earliest, int allow_cc_mode, int valid_at_insn_p)
{
  rtx cond;
  int reverse;
  rtx set;

  /* If this is not a standard conditional jump, we can't parse it.  */
  if (!JUMP_P (jump)
      || ! any_condjump_p (jump))
    return 0;
  set = pc_set (jump);

  cond = XEXP (SET_SRC (set), 0);

  /* If this branches to JUMP_LABEL when the condition is false, reverse
     the condition.  */
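  /* E.g. a pattern of the form
       (set (pc) (if_then_else (cond) (label_ref L) (pc)))
     uses COND directly, whereas
       (set (pc) (if_then_else (cond) (pc) (label_ref L)))
     only branches to L when COND is false, so COND is reversed below.  */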
  reverse
    = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
      && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump);

  return canonicalize_condition (jump, cond, reverse, earliest, NULL_RTX,
                                 allow_cc_mode, valid_at_insn_p);
}

/* Initialize the table NUM_SIGN_BIT_COPIES_IN_REP based on
   TARGET_MODE_REP_EXTENDED.

   Note that we assume that the property of
   TARGET_MODE_REP_EXTENDED(B, C) is sticky to the integral modes
   narrower than mode B.  I.e., if A is a mode narrower than B then in
   order to be able to operate on it in mode B, mode A needs to
   satisfy the requirements set by the representation of mode B.  */
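/* For instance, on a hypothetical target where
   TARGET_MODE_REP_EXTENDED (QImode, HImode) is SIGN_EXTEND and QImode,
   HImode and SImode are 8, 16 and 32 bits wide, the loop below would set
   num_sign_bit_copies_in_rep[HImode][QImode] to 8 and, because the
   property is treated as sticky, num_sign_bit_copies_in_rep[SImode][QImode]
   to 24.  */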

static void
init_num_sign_bit_copies_in_rep (void)
{
  enum machine_mode mode, in_mode;

  for (in_mode = GET_CLASS_NARROWEST_MODE (MODE_INT); in_mode != VOIDmode;
       in_mode = GET_MODE_WIDER_MODE (mode))
    for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != in_mode;
         mode = GET_MODE_WIDER_MODE (mode))
      {
        enum machine_mode i;

        /* Currently, it is assumed that TARGET_MODE_REP_EXTENDED
           extends to the next widest mode.  */
        gcc_assert (targetm.mode_rep_extended (mode, in_mode) == UNKNOWN
                    || GET_MODE_WIDER_MODE (mode) == in_mode);

        /* We are in in_mode.  Count how many bits outside of mode
           have to be copies of the sign-bit.  */
        for (i = mode; i != in_mode; i = GET_MODE_WIDER_MODE (i))
          {
            enum machine_mode wider = GET_MODE_WIDER_MODE (i);

            if (targetm.mode_rep_extended (i, wider) == SIGN_EXTEND
                /* We can only check sign-bit copies starting from the
                   top-bit.  In order to be able to check the bits we
                   have already seen we pretend that subsequent bits
                   have to be sign-bit copies too.  */
                || num_sign_bit_copies_in_rep [in_mode][mode])
              num_sign_bit_copies_in_rep [in_mode][mode]
                += GET_MODE_BITSIZE (wider) - GET_MODE_BITSIZE (i);
          }
      }
}

/* Suppose that truncation from the machine mode of X to MODE is not a
   no-op.  See if there is anything special about X so that we can
   assume it already contains a truncated value of MODE.  */
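/* On the hypothetical sign-extending target described above, an SImode X
   already known to have at least 25 sign-bit copies (the recorded 24 plus
   the sign bit itself) satisfies the QImode representation, so it can be
   treated as already truncated to QImode.  */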

bool
truncated_to_mode (enum machine_mode mode, const_rtx x)
{
  /* This register has already been used in MODE without explicit
     truncation.  */
  if (REG_P (x) && rtl_hooks.reg_truncated_to_mode (mode, x))
    return true;

  /* See if we already satisfy the requirements of MODE.  If yes we
     can just switch to MODE.  */
  if (num_sign_bit_copies_in_rep[GET_MODE (x)][mode]
      && (num_sign_bit_copies (x, GET_MODE (x))
          >= num_sign_bit_copies_in_rep[GET_MODE (x)][mode] + 1))
    return true;

  return false;
}

/* Initialize non_rtx_starting_operands, which is used to speed up
   for_each_rtx.  */
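/* For example, PLUS has format "ee", so its entry becomes 0, while
   CONST_INT has format "w" and gets -1; for_each_rtx uses this to start
   scanning at the first sub-rtx and to skip codes that contain none.  */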
void
init_rtlanal (void)
{
  int i;
  for (i = 0; i < NUM_RTX_CODE; i++)
    {
      const char *format = GET_RTX_FORMAT (i);
      const char *first = strpbrk (format, "eEV");
      non_rtx_starting_operands[i] = first ? first - format : -1;
    }

  init_num_sign_bit_copies_in_rep ();
}

/* Check whether this is a constant pool constant.  */
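/* E.g. a MEM whose address is a SYMBOL_REF into the constant pool is first
   replaced by the pooled constant itself, so a floating-point constant kept
   in the pool is recognized here as a CONST_DOUBLE.  */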
bool
constant_pool_constant_p (rtx x)
{
  x = avoid_constant_pool_reference (x);
  return GET_CODE (x) == CONST_DOUBLE;
}

/* If M is a bitmask that selects a field of low-order bits within an item but
   not the entire word, return the length of the field.  Return -1 otherwise.
   M is used in machine mode MODE.  */
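/* For example, low_bitmask_len (SImode, 0x3f) is 6, because 0x3f + 1 is a
   power of two, while low_bitmask_len (SImode, 0x30) is -1 since the mask
   does not start at bit 0.  */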

int
low_bitmask_len (enum machine_mode mode, unsigned HOST_WIDE_INT m)
{
  if (mode != VOIDmode)
    {
      if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
        return -1;
      m &= GET_MODE_MASK (mode);
    }

  return exact_log2 (m + 1);
}
