or1k-gcc/gcc/recog.c  (OpenCores openrisc trunk, rev 803)

/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl-error.h"
#include "tm_p.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "hard-reg-set.h"
#include "recog.h"
#include "regs.h"
#include "addresses.h"
#include "expr.h"
#include "function.h"
#include "flags.h"
#include "basic-block.h"
#include "output.h"
#include "reload.h"
#include "target.h"
#include "timevar.h"
#include "tree-pass.h"
#include "df.h"

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif

#ifndef HAVE_ATTR_enabled
static inline bool
get_attr_enabled (rtx insn ATTRIBUTE_UNUSED)
{
  return true;
}
#endif

static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx, bool);
static void validate_replace_src_1 (rtx *, void *);
static rtx split_insn (rtx);

/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in reginfo.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

struct recog_data recog_data;

/* Contains a vector of operand_alternative structures for every operand.
   Set up by preprocess_constraints.  */
struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
int epilogue_completed;

/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile (void)
{
  volatile_ok = 0;
}

void
init_recog (void)
{
  volatile_ok = 1;
}

/* Return true if labels in asm operands BODY are LABEL_REFs.  */

static bool
asm_labels_ok (rtx body)
{
  rtx asmop;
  int i;

  asmop = extract_asm_operands (body);
  if (asmop == NULL_RTX)
    return true;

  for (i = 0; i < ASM_OPERANDS_LABEL_LENGTH (asmop); i++)
    if (GET_CODE (ASM_OPERANDS_LABEL (asmop, i)) != LABEL_REF)
      return false;

  return true;
}

/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (rtx x)
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  if (!asm_labels_ok (x))
    return 0;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      extract_insn (make_insn_raw (x));
      constrain_operands (1);
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = XALLOCAVEC (rtx, noperands);
  constraints = XALLOCAVEC (const char *, noperands);

  decode_asm_operands (x, operands, NULL, constraints, NULL, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
        c++;
      if (! asm_operand_ok (operands[i], c, constraints))
        return 0;
    }

  return 1;
}

/* Static data for the next two routines.  */

typedef struct change_t
{
  rtx object;
  int old_code;
  rtx *loc;
  rtx old;
  bool unshare;
} change_t;

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;

/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW_RTX will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is nonzero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */

static bool
validate_change_1 (rtx object, rtx *loc, rtx new_rtx, bool in_group, bool unshare)
{
  rtx old = *loc;

  if (old == new_rtx || rtx_equal_p (old, new_rtx))
    return 1;

  gcc_assert (in_group != 0 || num_changes == 0);

  *loc = new_rtx;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
        /* This value allows for repeated substitutions inside complex
           indexed addresses, or changes in up to 5 insns.  */
        changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
        changes_allocated *= 2;

      changes = XRESIZEVEC (change_t, changes, changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;
  changes[num_changes].unshare = unshare;

  if (object && !MEM_P (object))
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
         case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}

/* Wrapper for validate_change_1 without the UNSHARE argument defaulting
   UNSHARE to false.  */

bool
validate_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, false);
}

/* Wrapper for validate_change_1 without the UNSHARE argument defaulting
   UNSHARE to true.  */

bool
validate_unshare_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, true);
}
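
/* Illustrative sketch, not part of the original file: a typical caller
   queues several changes against one insn with IN_GROUP nonzero, then
   validates and applies them all at once.  INSN, NEW_SRC and NEW_DEST
   are hypothetical placeholders.

     rtx set = single_set (insn);
     validate_change (insn, &SET_SRC (set), new_src, 1);
     validate_change (insn, &SET_DEST (set), new_dest, 1);
     if (! apply_change_group ())
       ;  neither change was kept; INSN is untouched

   Either both changes stick (and INSN is re-recognized) or both are
   rolled back, so the insn stream never holds a half-applied edit.  */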

/* Keep X canonicalized if some changes have made it non-canonical; only
   modifies the operands of X, not (for example) its code.  Simplifications
   are not the job of this routine.

   Return true if anything was changed.  */
bool
canonicalize_change_group (rtx insn, rtx x)
{
  if (COMMUTATIVE_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      /* Oops, the caller has made X no longer canonical.
         Let's redo the changes in the correct order.  */
      rtx tem = XEXP (x, 0);
      validate_unshare_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_unshare_change (insn, &XEXP (x, 1), tem, 1);
      return true;
    }
  else
    return false;
}
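
/* Illustrative sketch, not part of the original file: if a substitution
   leaves a commutative expression with its constant first, e.g. turns
   (plus:SI (reg:SI 60) (reg:SI 61)) into (plus:SI (const_int 4) (reg:SI 61)),
   a caller can queue the swap back to canonical order alongside its own
   changes:

     canonicalize_change_group (insn, x);
     apply_change_group ();
*/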

/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.  */

int
insn_invalid_p (rtx insn)
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
                     (GET_CODE (pat) == SET
                      && ! reload_completed && ! reload_in_progress)
                     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;

  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
        return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1))
        return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}

/* Return number of changes made and not validated yet.  */
int
num_changes_pending (void)
{
  return num_changes;
}

/* Tentatively apply the changes numbered NUM and up.
   Return 1 if all changes are valid, zero otherwise.  */

int
verify_changes (int num)
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is an insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = num; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
         already tested, ignore it.  */
      if (object == 0 || object == last_validated)
        continue;

      if (MEM_P (object))
        {
          if (! memory_address_addr_space_p (GET_MODE (object),
                                             XEXP (object, 0),
                                             MEM_ADDR_SPACE (object)))
            break;
        }
      else if (REG_P (changes[i].old)
               && asm_noperands (PATTERN (object)) > 0
               && REG_EXPR (changes[i].old) != NULL_TREE
               && DECL_ASSEMBLER_NAME_SET_P (REG_EXPR (changes[i].old))
               && DECL_REGISTER (REG_EXPR (changes[i].old)))
        {
          /* Don't allow changes of hard register operands to inline
             assemblies if they have been defined as register asm ("x").  */
          break;
        }
      else if (DEBUG_INSN_P (object))
        continue;
      else if (insn_invalid_p (object))
        {
          rtx pat = PATTERN (object);

          /* Perhaps we couldn't recognize the insn because there were
             extra CLOBBERs at the end.  If so, try to re-recognize
             without the last CLOBBER (later iterations will cause each of
             them to be eliminated, in turn).  But don't do this if we
             have an ASM_OPERAND.  */
          if (GET_CODE (pat) == PARALLEL
              && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
              && asm_noperands (PATTERN (object)) < 0)
            {
              rtx newpat;

              if (XVECLEN (pat, 0) == 2)
                newpat = XVECEXP (pat, 0, 0);
              else
                {
                  int j;

                  newpat
                    = gen_rtx_PARALLEL (VOIDmode,
                                        rtvec_alloc (XVECLEN (pat, 0) - 1));
                  for (j = 0; j < XVECLEN (newpat, 0); j++)
                    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
                }

              /* Add a new change to this group to replace the pattern
                 with this new pattern.  Then consider this change
                 as having succeeded.  The change we added will
                 cause the entire call to fail if things remain invalid.

                 Note that this can lose if a later change than the one
                 we are processing specified &XVECEXP (PATTERN (object), 0, X)
                 but this shouldn't occur.  */

              validate_change (object, &PATTERN (object), newpat, 1);
              continue;
            }
          else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
                   || GET_CODE (pat) == VAR_LOCATION)
            /* If this insn is a CLOBBER or USE, it is always valid, but is
               never recognized.  */
            continue;
          else
            break;
        }
      last_validated = object;
    }

  return (i == num_changes);
}

/* A group of changes has previously been issued with validate_change
   and verified with verify_changes.  Call df_insn_rescan for each of
   the insns changed and clear num_changes.  */

void
confirm_change_group (void)
{
  int i;
  rtx last_object = NULL;

  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      if (changes[i].unshare)
        *changes[i].loc = copy_rtx (*changes[i].loc);

      /* Avoid unnecessary rescanning when multiple changes to same instruction
         are made.  */
      if (object)
        {
          if (object != last_object && last_object && INSN_P (last_object))
            df_insn_rescan (last_object);
          last_object = object;
        }
    }

  if (last_object && INSN_P (last_object))
    df_insn_rescan (last_object);
  num_changes = 0;
}

/* Apply a group of changes previously issued with `validate_change'.
   If all changes are valid, call confirm_change_group and return 1,
   otherwise, call cancel_changes and return 0.  */

int
apply_change_group (void)
{
  if (verify_changes (0))
    {
      confirm_change_group ();
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}

/* Return the number of changes so far in the current group.  */

int
num_validated_changes (void)
{
  return num_changes;
}

/* Retract the changes numbered NUM and up.  */

void
cancel_changes (int num)
{
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      if (changes[i].object && !MEM_P (changes[i].object))
        INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}
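
/* Illustrative sketch, not part of the original file: a pass that wants
   finer control than apply_change_group can checkpoint the group with
   num_validated_changes and back out only its own tentative edits:

     int checkpoint = num_validated_changes ();
     validate_change (insn, &XEXP (x, 0), replacement, 1);
     if (! verify_changes (checkpoint))
       cancel_changes (checkpoint);

   Here only the changes queued after the checkpoint are retracted; any
   earlier, already-verified changes in the group remain pending.  */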

/* A subroutine of validate_replace_rtx_1 that tries to simplify the resulting
   rtx.  */

static void
simplify_while_replacing (rtx *loc, rtx to, rtx object,
                          enum machine_mode op0_mode)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx new_rtx;

  if (SWAPPABLE_OPERANDS_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_unshare_change (object, loc,
                               gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
                                               : swap_condition (code),
                                               GET_MODE (x), XEXP (x, 1),
                                               XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
         simplify_gen_binary to try to simplify it.
         ??? We may want later to remove this, once simplification is
         separated from this function.  */
      if (CONST_INT_P (XEXP (x, 1)) && XEXP (x, 1) == to)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      if (CONST_INT_P (XEXP (x, 1))
          || GET_CODE (XEXP (x, 1)) == CONST_DOUBLE)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0),
                          simplify_gen_unary (NEG,
                                              GET_MODE (x), XEXP (x, 1),
                                              GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
        {
          new_rtx = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
                                    op0_mode);
          /* If any of the above failed, substitute in something that
             we know won't be recognized.  */
          if (!new_rtx)
            new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
          validate_change (object, loc, new_rtx, 1);
        }
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new_rtx = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
                             SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new_rtx && GET_MODE (SUBREG_REG (x)) == VOIDmode)
        new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new_rtx)
        validate_change (object, loc, new_rtx, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
         to be the mode required for memory in extract operations (this isn't
         likely to be an insertion operation; if it was, nothing bad will
         happen, we might just fail in some cases).  */

      if (MEM_P (XEXP (x, 0))
          && CONST_INT_P (XEXP (x, 1))
          && CONST_INT_P (XEXP (x, 2))
          && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0))
          && !MEM_VOLATILE_P (XEXP (x, 0)))
        {
          enum machine_mode wanted_mode = VOIDmode;
          enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
          int pos = INTVAL (XEXP (x, 2));

          if (GET_CODE (x) == ZERO_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extzv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }
          else if (GET_CODE (x) == SIGN_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }

          /* If we have a narrower mode, we can do something.  */
          if (wanted_mode != VOIDmode
              && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
            {
              int offset = pos / BITS_PER_UNIT;
              rtx newmem;

              /* If the bytes and bits are counted differently, we
                 must adjust the offset.  */
              if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
                offset =
                  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
                   offset);

              gcc_assert (GET_MODE_PRECISION (wanted_mode)
                          == GET_MODE_BITSIZE (wanted_mode));
              pos %= GET_MODE_BITSIZE (wanted_mode);

              newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

              validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
              validate_change (object, &XEXP (x, 0), newmem, 1);
            }
        }

      break;

    default:
      break;
    }
}

/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object,
                        bool simplify)
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  enum machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (REG_P (x) && REG_P (from)
          && GET_MODE (x) == GET_MODE (from)
          && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
          && rtx_equal_p (x, from)))
    {
      validate_unshare_change (object, loc, to, 1);
      return;
    }

  /* Call ourselves recursively to perform the replacements.
     We must not replace inside an already replaced expression, otherwise we
     get infinite recursion for replacements like (reg X)->(subreg (reg X))
     done by regmove, so we must special case shared ASM_OPERANDS.  */

  if (GET_CODE (x) == PARALLEL)
    {
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
        {
          if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
              && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
            {
              /* Verify that operands are really shared.  */
              gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
                          == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
                                                              (x, 0, j))));
              validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
                                      from, to, object, simplify);
            }
          else
            validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object,
                                    simplify);
        }
    }
  else
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          validate_replace_rtx_1 (&XEXP (x, i), from, to, object, simplify);
        else if (fmt[i] == 'E')
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object,
                                    simplify);
      }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* Allow the substituted expression to have a different mode.  This is
     used by regmove to change the mode of a pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */
  if (simplify)
    simplify_while_replacing (loc, to, object, op0_mode);
}

/* Try replacing every occurrence of FROM in subexpression LOC of INSN
   with TO.  After all changes have been made, validate by seeing
   if INSN is still valid.  */

int
validate_replace_rtx_subexp (rtx from, rtx to, rtx insn, rtx *loc)
{
  validate_replace_rtx_1 (loc, from, to, insn, true);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  return apply_change_group ();
}
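
/* Illustrative sketch, not part of the original file: replacing one
   register with another throughout an insn, keeping the change only if
   the insn is still recognizable.  INSN, OLD_REG and NEW_REG are
   hypothetical placeholders.

     if (validate_replace_rtx (old_reg, new_reg, insn))
       ;  the substitution was applied and INSN re-recognized
     else
       ;  INSN is unchanged; all tentative edits were rolled back
*/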

/* Try replacing every occurrence of FROM in WHERE with TO.  Assume that WHERE
   is a part of INSN.  After all changes have been made, validate by seeing if
   INSN is still valid.
   validate_replace_rtx (from, to, insn) is equivalent to
   validate_replace_rtx_part (from, to, &PATTERN (insn), insn).  */

int
validate_replace_rtx_part (rtx from, rtx to, rtx *where, rtx insn)
{
  validate_replace_rtx_1 (where, from, to, insn, true);
  return apply_change_group ();
}

/* Same as above, but do not simplify rtx afterwards.  */
int
validate_replace_rtx_part_nosimplify (rtx from, rtx to, rtx *where,
                                      rtx insn)
{
  validate_replace_rtx_1 (where, from, to, insn, false);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  This also
   will replace in REG_EQUAL and REG_EQUIV notes.  */

void
validate_replace_rtx_group (rtx from, rtx to, rtx insn)
{
  rtx note;
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_EQUAL
        || REG_NOTE_KIND (note) == REG_EQUIV)
      validate_replace_rtx_1 (&XEXP (note, 0), from, to, insn, true);
}

/* Function called by note_uses to replace used subexpressions.  */
struct validate_replace_src_data
{
  rtx from;                     /* Old RTX */
  rtx to;                       /* New RTX */
  rtx insn;                     /* Insn in which substitution is occurring.  */
};

static void
validate_replace_src_1 (rtx *x, void *data)
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn, true);
}

/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  */

void
validate_replace_src_group (rtx from, rtx to, rtx insn)
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
}

/* Try to simplify INSN.
   Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
   pattern and return true if something was simplified.  */

bool
validate_simplify_insn (rtx insn)
{
  int i;
  rtx pat = NULL;
  rtx newpat = NULL;

  pat = PATTERN (insn);

  if (GET_CODE (pat) == SET)
    {
      newpat = simplify_rtx (SET_SRC (pat));
      if (newpat && !rtx_equal_p (SET_SRC (pat), newpat))
        validate_change (insn, &SET_SRC (pat), newpat, 1);
      newpat = simplify_rtx (SET_DEST (pat));
      if (newpat && !rtx_equal_p (SET_DEST (pat), newpat))
        validate_change (insn, &SET_DEST (pat), newpat, 1);
    }
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
        rtx s = XVECEXP (pat, 0, i);

        if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
          {
            newpat = simplify_rtx (SET_SRC (s));
            if (newpat && !rtx_equal_p (SET_SRC (s), newpat))
              validate_change (insn, &SET_SRC (s), newpat, 1);
            newpat = simplify_rtx (SET_DEST (s));
            if (newpat && !rtx_equal_p (SET_DEST (s), newpat))
              validate_change (insn, &SET_DEST (s), newpat, 1);
          }
      }
  return ((num_changes_pending () > 0) && (apply_change_group () > 0));
}

#ifdef HAVE_cc0
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (rtx insn)
{
  rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return (INSN_P (next)
          && ! inequality_comparisons_p (PATTERN (next)));
}
#endif

/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
general_operand (rtx op, enum machine_mode mode)
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
             || mode == VOIDmode)
            && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
            && targetm.legitimate_constant_p (mode == VOIDmode
                                              ? GET_MODE (op)
                                              : mode, op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
         references to be explicit, so outlaw paradoxical SUBREGs.
         However, we must allow them after reload so that they can
         get cleaned up by cleanup_subreg_operands.  */
      if (!reload_completed && MEM_P (sub)
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
         may result in an incorrect reference.  We should simplify all valid
         subregs of MEM anyway.  But allow this after reload because we
         might be called from cleanup_subreg_operands.

         ??? This is a kludge.  */
      if (!reload_completed && SUBREG_BYTE (op) != 0
          && MEM_P (sub))
        return 0;

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
            || in_hard_reg_set_p (operand_reg_set, GET_MODE (op), REGNO (op)));

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
        return 0;

      /* Use the mem's mode, since it will be reloaded thus.  */
      if (memory_address_addr_space_p (GET_MODE (op), y, MEM_ADDR_SPACE (op)))
        return 1;
    }

  return 0;
}
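
/* Illustrative sketch, not part of the original file: these predicates
   are normally referenced from match_operand in the machine description,
   but they can also be called directly, e.g. to test whether a candidate
   rtx OP could serve as a general SImode operand:

     if (general_operand (op, SImode))
       ;  OP is a valid SImode register, memory reference, or constant
*/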

/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (rtx op, enum machine_mode mode)
{
  return memory_address_p (mode, op);
}

/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
register_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && MEM_P (sub))
        return general_operand (op, mode);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if (REG_P (sub)
          && REGNO (sub) < FIRST_PSEUDO_REGISTER
          && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT)
        return 0;
#endif

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
    }

  return (REG_P (op)
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || in_hard_reg_set_p (operand_reg_set,
                                    GET_MODE (op), REGNO (op))));
}

/* Return 1 for a register in Pmode; ignore the tested mode.  */

int
pmode_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return register_operand (op, Pmode);
}

/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

int
scratch_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
          || (REG_P (op)
              && REGNO (op) < FIRST_PSEUDO_REGISTER));
}

/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return (CONSTANT_P (op)
          && (GET_MODE (op) == mode || mode == VOIDmode
              || GET_MODE (op) == VOIDmode)
          && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
          && targetm.legitimate_constant_p (mode == VOIDmode
                                            ? GET_MODE (op)
                                            : mode, op));
}

/* Returns 1 if OP is an operand that is a CONST_INT.  */

int
const_int_operand (rtx op, enum machine_mode mode)
{
  if (!CONST_INT_P (op))
    return 0;

  if (mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return 1;
}

/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number.  */

int
const_double_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((GET_CODE (op) == CONST_DOUBLE || CONST_INT_P (op))
          && (mode == VOIDmode || GET_MODE (op) == mode
              || GET_MODE (op) == VOIDmode));
}

/* Return 1 if OP is a general operand that is not an immediate operand.  */

int
nonimmediate_operand (rtx op, enum machine_mode mode)
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}

/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (rtx op, enum machine_mode mode)
{
  if (CONSTANT_P (op))
    return immediate_operand (op, mode);

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && MEM_P (SUBREG_REG (op)))
        return general_operand (op, mode);
      op = SUBREG_REG (op);
    }

  return (REG_P (op)
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || in_hard_reg_set_p (operand_reg_set,
                                    GET_MODE (op), REGNO (op))));
}

/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (rtx op, enum machine_mode mode)
{
  unsigned int rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (rounded_size);
#endif

  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (rounded_size == GET_MODE_SIZE (mode))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
        return 0;
    }
  else
    {
      if (GET_CODE (op) != PRE_MODIFY
          || GET_CODE (XEXP (op, 1)) != PLUS
          || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
          || !CONST_INT_P (XEXP (XEXP (op, 1), 1))
#ifdef STACK_GROWS_DOWNWARD
          || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
#else
          || INTVAL (XEXP (XEXP (op, 1), 1)) != (int) rounded_size
#endif
          )
        return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}
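
/* Illustrative sketch, not part of the original file: on a target where
   the stack grows downward and no push rounding applies, push_operand
   accepts a MEM of the form

     (mem:SI (pre_dec:SI (reg:SI sp)))

   while a push whose size is rounded up must take the PRE_MODIFY shape,
   e.g. an HImode push rounded to 4 bytes:

     (mem:HI (pre_modify:SI (reg:SI sp)
                            (plus:SI (reg:SI sp) (const_int -4))))
*/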

/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
pop_operand (rtx op, enum machine_mode mode)
{
  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}

/* Return 1 if ADDR is a valid memory address
   for mode MODE in address space AS.  */

int
memory_address_addr_space_p (enum machine_mode mode ATTRIBUTE_UNUSED,
                             rtx addr, addr_space_t as)
{
#ifdef GO_IF_LEGITIMATE_ADDRESS
  gcc_assert (ADDR_SPACE_GENERIC_P (as));
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
#else
  return targetm.addr_space.legitimate_address_p (mode, addr, 0, as);
#endif
}

/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (rtx op, enum machine_mode mode)
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before end of reload pass,
       because (SUBREG (MEM...)) forces reloading into a register.  */
    return MEM_P (op) && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (MEM_P (inner) && general_operand (op, mode));
}

/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (rtx op, enum machine_mode mode)
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
    {
      int offset = SUBREG_BYTE (op);
      rtx inner = SUBREG_REG (op);

      if (mode != VOIDmode && GET_MODE (op) != mode)
        return 0;

      /* The only way that we can have a general_operand as the resulting
         address is if OFFSET is zero and the address already is an operand
         or if the address is (plus Y (const_int -OFFSET)) and Y is an
         operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
              || (GET_CODE (XEXP (inner, 0)) == PLUS
                  && CONST_INT_P (XEXP (XEXP (inner, 0), 1))
                  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
                  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  return (MEM_P (op)
          && memory_operand (op, mode)
          && general_operand (XEXP (op, 0), Pmode));
}

/* Return 1 if this is an ordered comparison operator (not including
   ORDERED and UNORDERED).  */

int
ordered_comparison_operator (rtx op, enum machine_mode mode)
{
  if (mode != VOIDmode && GET_MODE (op) != mode)
    return false;
  switch (GET_CODE (op))
    {
    case EQ:
    case NE:
    case LT:
    case LTU:
    case LE:
    case LEU:
    case GT:
    case GTU:
    case GE:
    case GEU:
      return true;
    default:
      return false;
    }
}

/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (rtx op, enum machine_mode mode)
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
          && COMPARISON_P (op));
}

/* If BODY is an insn body that uses ASM_OPERANDS, return it.  */

rtx
extract_asm_operands (rtx body)
{
  rtx tmp;
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      return body;

    case SET:
      /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
      tmp = SET_SRC (body);
      if (GET_CODE (tmp) == ASM_OPERANDS)
        return tmp;
      break;

    case PARALLEL:
      tmp = XVECEXP (body, 0, 0);
      if (GET_CODE (tmp) == ASM_OPERANDS)
        return tmp;
      if (GET_CODE (tmp) == SET)
        {
          tmp = SET_SRC (tmp);
          if (GET_CODE (tmp) == ASM_OPERANDS)
            return tmp;
        }
      break;

    default:
      break;
    }
  return NULL;
}

/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

int
asm_noperands (const_rtx body)
{
  rtx asm_op = extract_asm_operands (CONST_CAST_RTX (body));
  int n_sets = 0;

  if (asm_op == NULL)
    return -1;

  if (GET_CODE (body) == SET)
    n_sets = 1;
  else if (GET_CODE (body) == PARALLEL)
    {
      int i;
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET)
        {
          /* Multiple output operands, or 1 output plus some clobbers:
             body is
             [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
          /* Count backwards through CLOBBERs to determine number of SETs.  */
          for (i = XVECLEN (body, 0); i > 0; i--)
            {
              if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
                break;
              if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
                return -1;
            }

          /* N_SETS is now number of output operands.  */
          n_sets = i;

          /* Verify that all the SETs we have
             came from a single original asm_operands insn
             (so that invalid combinations are blocked).  */
          for (i = 0; i < n_sets; i++)
            {
              rtx elt = XVECEXP (body, 0, i);
              if (GET_CODE (elt) != SET)
                return -1;
              if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
                return -1;
              /* If these ASM_OPERANDS rtx's came from different original insns
                 then they aren't allowed together.  */
              if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
                  != ASM_OPERANDS_INPUT_VEC (asm_op))
                return -1;
            }
        }
      else
        {
          /* 0 outputs, but some clobbers:
             body is [(asm_operands ...) (clobber (reg ...))...].  */
          /* Make sure all the other parallel things really are clobbers.  */
          for (i = XVECLEN (body, 0) - 1; i > 0; i--)
            if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
              return -1;
        }
    }

  return (ASM_OPERANDS_INPUT_LENGTH (asm_op)
          + ASM_OPERANDS_LABEL_LENGTH (asm_op) + n_sets);
}
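
/* Illustrative sketch, not part of the original file: for a statement
   such as

     asm ("add %0, %1, %2" : "=r" (x) : "r" (y), "r" (z));

   the body is (set (reg x) (asm_operands ...)) with two inputs, so
   asm_noperands would return 3: one output SET plus two inputs, and
   no asm goto labels.  */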

/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
                     const char **constraints, enum machine_mode *modes,
                     location_t *loc)
{
  int nbase = 0, n, i;
  rtx asmop;

  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* Zero output asm: BODY is (asm_operands ...).  */
      asmop = body;
      break;

    case SET:
      /* Single output asm: BODY is (set OUTPUT (asm_operands ...)).  */
      asmop = SET_SRC (body);

      /* The output is in the SET.
         Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
        operands[0] = SET_DEST (body);
      if (operand_locs)
        operand_locs[0] = &SET_DEST (body);
      if (constraints)
        constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
        modes[0] = GET_MODE (SET_DEST (body));
      nbase = 1;
      break;

    case PARALLEL:
      {
        int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */

        asmop = XVECEXP (body, 0, 0);
        if (GET_CODE (asmop) == SET)
          {
            asmop = SET_SRC (asmop);

            /* At least one output, plus some CLOBBERs.  The outputs are in
               the SETs.  Their constraints are in the ASM_OPERANDS itself.  */
            for (i = 0; i < nparallel; i++)
              {
                if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
                  break;                /* Past last SET */
                if (operands)
                  operands[i] = SET_DEST (XVECEXP (body, 0, i));
                if (operand_locs)
                  operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
                if (constraints)
                  constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
                if (modes)
                  modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
              }
            nbase = i;
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  n = ASM_OPERANDS_INPUT_LENGTH (asmop);
  for (i = 0; i < n; i++)
    {
      if (operand_locs)
        operand_locs[nbase + i] = &ASM_OPERANDS_INPUT (asmop, i);
      if (operands)
        operands[nbase + i] = ASM_OPERANDS_INPUT (asmop, i);
      if (constraints)
        constraints[nbase + i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
      if (modes)
        modes[nbase + i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
    }
  nbase += n;

  n = ASM_OPERANDS_LABEL_LENGTH (asmop);
  for (i = 0; i < n; i++)
    {
      if (operand_locs)
        operand_locs[nbase + i] = &ASM_OPERANDS_LABEL (asmop, i);
      if (operands)
        operands[nbase + i] = ASM_OPERANDS_LABEL (asmop, i);
      if (constraints)
        constraints[nbase + i] = "";
      if (modes)
        modes[nbase + i] = Pmode;
    }

  if (loc)
    *loc = ASM_OPERANDS_SOURCE_LOCATION (asmop);

  return ASM_OPERANDS_TEMPLATE (asmop);
}
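
/* Illustrative sketch, not part of the original file: callers usually
   size the output vectors with asm_noperands first, mirroring what
   check_asm_operands does above:

     int n = asm_noperands (body);
     rtx *ops = XALLOCAVEC (rtx, n);
     const char **cons = XALLOCAVEC (const char *, n);
     const char *tmpl = decode_asm_operands (body, ops, NULL, cons,
                                             NULL, NULL);
*/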
1592
 
1593
/* Check if an asm_operand matches its constraints.
1594
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */
1595
 
1596
int
1597
asm_operand_ok (rtx op, const char *constraint, const char **constraints)
1598
{
1599
  int result = 0;
1600
#ifdef AUTO_INC_DEC
1601
  bool incdec_ok = false;
1602
#endif
1603
 
1604
  /* Use constrain_operands after reload.  */
1605
  gcc_assert (!reload_completed);
1606
 
1607
  /* Empty constraint string is the same as "X,...,X", i.e. X for as
1608
     many alternatives as required to match the other operands.  */
1609
  if (*constraint == '\0')
1610
    result = 1;
1611
 
1612
  while (*constraint)
1613
    {
1614
      char c = *constraint;
1615
      int len;
1616
      switch (c)
1617
        {
1618
        case ',':
1619
          constraint++;
1620
          continue;
1621
        case '=':
1622
        case '+':
1623
        case '*':
1624
        case '%':
1625
        case '!':
1626
        case '#':
1627
        case '&':
1628
        case '?':
1629
          break;
1630
 
1631
        case '0': case '1': case '2': case '3': case '4':
1632
        case '5': case '6': case '7': case '8': case '9':
1633
          /* If caller provided constraints pointer, look up
1634
             the maching constraint.  Otherwise, our caller should have
1635
             given us the proper matching constraint, but we can't
1636
             actually fail the check if they didn't.  Indicate that
1637
             results are inconclusive.  */
          if (constraints)
            {
              char *end;
              unsigned long match;

              match = strtoul (constraint, &end, 10);
              if (!result)
                result = asm_operand_ok (op, constraints[match], NULL);
              constraint = (const char *) end;
            }
          else
            {
              do
                constraint++;
              while (ISDIGIT (*constraint));
              if (! result)
                result = -1;
            }
          continue;

        case 'p':
          if (address_operand (op, VOIDmode))
            result = 1;
          break;

        case TARGET_MEM_CONSTRAINT:
        case 'V': /* non-offsettable */
          if (memory_operand (op, VOIDmode))
            result = 1;
          break;

        case 'o': /* offsettable */
          if (offsettable_nonstrict_memref_p (op))
            result = 1;
          break;

        case '<':
          /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed to exist,
             excepting those that expand_call created.  Further, on some
             machines which do not have generalized auto inc/dec, an inc/dec
             is not a memory_operand.

             Match any memory and hope things are resolved after reload.  */

          if (MEM_P (op)
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_DEC
                  || GET_CODE (XEXP (op, 0)) == POST_DEC))
            result = 1;
#ifdef AUTO_INC_DEC
          incdec_ok = true;
#endif
          break;

        case '>':
          if (MEM_P (op)
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_INC
                  || GET_CODE (XEXP (op, 0)) == POST_INC))
            result = 1;
#ifdef AUTO_INC_DEC
          incdec_ok = true;
#endif
          break;

        case 'E':
        case 'F':
          if (GET_CODE (op) == CONST_DOUBLE
              || (GET_CODE (op) == CONST_VECTOR
                  && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
            result = 1;
          break;

        case 'G':
          if (GET_CODE (op) == CONST_DOUBLE
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', constraint))
            result = 1;
          break;
        case 'H':
          if (GET_CODE (op) == CONST_DOUBLE
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'H', constraint))
            result = 1;
          break;

        case 's':
          if (CONST_INT_P (op)
              || (GET_CODE (op) == CONST_DOUBLE
                  && GET_MODE (op) == VOIDmode))
            break;
          /* Fall through.  */

        case 'i':
          if (CONSTANT_P (op) && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op)))
            result = 1;
          break;

        case 'n':
          if (CONST_INT_P (op)
              || (GET_CODE (op) == CONST_DOUBLE
                  && GET_MODE (op) == VOIDmode))
            result = 1;
          break;

        case 'I':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'I', constraint))
            result = 1;
          break;
        case 'J':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'J', constraint))
            result = 1;
          break;
        case 'K':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', constraint))
            result = 1;
          break;
        case 'L':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'L', constraint))
            result = 1;
          break;
        case 'M':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'M', constraint))
            result = 1;
          break;
        case 'N':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'N', constraint))
            result = 1;
          break;
        case 'O':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'O', constraint))
            result = 1;
          break;
        case 'P':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'P', constraint))
            result = 1;
          break;

        case 'X':
          result = 1;
          break;

        case 'g':
          if (general_operand (op, VOIDmode))
            result = 1;
          break;

        default:
          /* For all other letters, we first check for a register class,
             otherwise it is an EXTRA_CONSTRAINT.  */
          if (REG_CLASS_FROM_CONSTRAINT (c, constraint) != NO_REGS)
            {
            case 'r':
              if (GET_MODE (op) == BLKmode)
                break;
              if (register_operand (op, VOIDmode))
                result = 1;
            }
#ifdef EXTRA_CONSTRAINT_STR
          else if (EXTRA_MEMORY_CONSTRAINT (c, constraint))
            /* Every memory operand can be reloaded to fit.  */
            result = result || memory_operand (op, VOIDmode);
          else if (EXTRA_ADDRESS_CONSTRAINT (c, constraint))
            /* Every address operand can be reloaded to fit.  */
            result = result || address_operand (op, VOIDmode);
          else if (EXTRA_CONSTRAINT_STR (op, c, constraint))
            result = 1;
#endif
          break;
        }
      len = CONSTRAINT_LEN (c, constraint);
      do
        constraint++;
      while (--len && *constraint);
      if (len)
        return 0;
    }

#ifdef AUTO_INC_DEC
  /* For operands without < or > constraints reject side-effects.  */
  if (!incdec_ok && result && MEM_P (op))
    switch (GET_CODE (XEXP (op, 0)))
      {
      case PRE_INC:
      case POST_INC:
      case PRE_DEC:
      case POST_DEC:
      case PRE_MODIFY:
      case POST_MODIFY:
        return 0;
      default:
        break;
      }
#endif

  return result;
}

/* Given an rtx *P, if it is a sum containing an integer constant term,
   return the location (type rtx *) of the pointer to that constant term.
   Otherwise, return a null pointer.  */
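/* Example (register number illustrative): for
   *P == (plus:SI (reg:SI 100) (const_int 8)) the result is
   &XEXP (*p, 1), the location of the (const_int 8); for a lone
   (reg:SI 100) the result is a null pointer.  */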

rtx *
find_constant_term_loc (rtx *p)
{
  rtx *tem;
  enum rtx_code code = GET_CODE (*p);

  /* If *P IS such a constant term, P is its location.  */

  if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
      || code == CONST)
    return p;

  /* Otherwise, if not a sum, it has no constant term.  */

  if (GET_CODE (*p) != PLUS)
    return 0;

  /* If one of the summands is constant, return its location.  */

  if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
      && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
    return p;

  /* Otherwise, check each summand for containing a constant term.  */

  if (XEXP (*p, 0) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 0));
      if (tem != 0)
        return tem;
    }

  if (XEXP (*p, 1) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 1));
      if (tem != 0)
        return tem;
    }

  return 0;
}

/* Return 1 if OP is a memory reference whose address contains no side
   effects and remains valid after the addition of a positive integer
   less than the size of the object being referenced.

   We assume that the original address is valid and do not check it.

   This uses strict_memory_address_p as a subroutine, so
   don't use it before reload.  */

int
offsettable_memref_p (rtx op)
{
  return ((MEM_P (op))
          && offsettable_address_addr_space_p (1, GET_MODE (op), XEXP (op, 0),
                                               MEM_ADDR_SPACE (op)));
}

/* Similar, but don't require a strictly valid mem ref:
   consider pseudo-regs valid as index or base regs.  */

int
offsettable_nonstrict_memref_p (rtx op)
{
  return ((MEM_P (op))
          && offsettable_address_addr_space_p (0, GET_MODE (op), XEXP (op, 0),
                                               MEM_ADDR_SPACE (op)));
}

/* Return 1 if Y is a memory address which contains no side effects
   and would remain valid for address space AS after the addition of
   a positive integer less than the size of MODE.

   We assume that the original address is valid and do not check it.
   We do check that it is valid for narrower modes.

   If STRICTP is nonzero, we require a strictly valid address,
   for the sake of use in reload.c.  */
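/* Illustration (values assumed): for SImode, whose size is 4, an address
   (plus (reg) (const_int 100)) is tested by checking whether
   (plus (reg) (const_int 103)) -- the original displacement plus
   size - 1 -- is still a valid QImode address.  */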

int
offsettable_address_addr_space_p (int strictp, enum machine_mode mode, rtx y,
                                  addr_space_t as)
{
  enum rtx_code ycode = GET_CODE (y);
  rtx z;
  rtx y1 = y;
  rtx *y2;
  int (*addressp) (enum machine_mode, rtx, addr_space_t) =
    (strictp ? strict_memory_address_addr_space_p
             : memory_address_addr_space_p);
  unsigned int mode_sz = GET_MODE_SIZE (mode);

  if (CONSTANT_ADDRESS_P (y))
    return 1;

  /* Adjusting an offsettable address involves changing to a narrower mode.
     Make sure that's OK.  */

  if (mode_dependent_address_p (y))
    return 0;

  /* ??? How much offset does an offsettable BLKmode reference need?
     Clearly that depends on the situation in which it's being used.
     However, the current situation in which we test 0xffffffff is
     less than ideal.  Caveat user.  */
  if (mode_sz == 0)
    mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;

  /* If the expression contains a constant term,
     see if it remains valid when max possible offset is added.  */

  if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
    {
      int good;

      y1 = *y2;
      *y2 = plus_constant (*y2, mode_sz - 1);
      /* Use QImode because an odd displacement may be automatically invalid
         for any wider mode.  But it should be valid for a single byte.  */
      good = (*addressp) (QImode, y, as);

      /* In any case, restore old contents of memory.  */
      *y2 = y1;
      return good;
    }

  if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
    return 0;

  /* The offset added here is chosen as the maximum offset that
     any instruction could need to add when operating on something
     of the specified mode.  We assume that if Y and Y+c are
     valid addresses then so is Y+d for all 0<d<c.  adjust_address will
     go inside a LO_SUM here, so we do so as well.  */
  if (GET_CODE (y) == LO_SUM
      && mode != BLKmode
      && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
    z = gen_rtx_LO_SUM (GET_MODE (y), XEXP (y, 0),
                        plus_constant (XEXP (y, 1), mode_sz - 1));
  else
    z = plus_constant (y, mode_sz - 1);

  /* Use QImode because an odd displacement may be automatically invalid
     for any wider mode.  But it should be valid for a single byte.  */
  return (*addressp) (QImode, z, as);
}

/* Return 1 if ADDR is an address-expression whose effect depends
   on the mode of the memory reference it is used in.

   Autoincrement addressing is a typical example of mode-dependence
   because the amount of the increment depends on the mode.  */
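/* E.g. (post_inc (reg)) steps the register by the size of the mode of
   the enclosing MEM -- by 1 for a QImode reference but by 4 for SImode
   on a typical 32-bit target -- so such an address is mode-dependent
   (illustrative example).  */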

bool
mode_dependent_address_p (rtx addr)
{
  /* Auto-increment addressing with anything other than post_modify
     or pre_modify always introduces a mode dependency.  Catch such
     cases now instead of deferring to the target.  */
  if (GET_CODE (addr) == PRE_INC
      || GET_CODE (addr) == POST_INC
      || GET_CODE (addr) == PRE_DEC
      || GET_CODE (addr) == POST_DEC)
    return true;

  return targetm.mode_dependent_address_p (addr);
}

/* Like extract_insn, but saves the extracted insn and, when called again
   for the same insn, does not extract it again, expecting that recog_data
   still contains the valid information.  This is used primarily by the
   gen_attr infrastructure, which often extracts the same insn repeatedly.  */
void
extract_insn_cached (rtx insn)
{
  if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
    return;
  extract_insn (insn);
  recog_data.insn = insn;
}

/* Do cached extract_insn, constrain_operands and complain about failures.
   Used by insn_attrtab.  */
void
extract_constrain_insn_cached (rtx insn)
{
  extract_insn_cached (insn);
  if (which_alternative == -1
      && !constrain_operands (reload_completed))
    fatal_insn_not_found (insn);
}

/* Do cached constrain_operands and complain about failures.  */
int
constrain_operands_cached (int strict)
{
  if (which_alternative == -1)
    return constrain_operands (strict);
  else
    return 1;
}

/* Analyze INSN and fill in recog_data.  */
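/* As a sketch (values illustrative): after extract_insn on a recognized
   move (set (reg 100) (reg 101)), recog_data.n_operands would be 2,
   recog_data.operand[0] and operand[1] the two registers, and
   recog_data.constraints[i] would point into the matching define_insn.  */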

void
extract_insn (rtx insn)
{
  int i;
  int icode;
  int noperands;
  rtx body = PATTERN (insn);

  recog_data.n_operands = 0;
  recog_data.n_alternatives = 0;
  recog_data.n_dups = 0;
  recog_data.is_asm = false;

  switch (GET_CODE (body))
    {
    case USE:
    case CLOBBER:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case VAR_LOCATION:
      return;

    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
        goto asm_insn;
      else
        goto normal_insn;
    case PARALLEL:
      if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
           && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
          || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
        goto asm_insn;
      else
        goto normal_insn;
    case ASM_OPERANDS:
    asm_insn:
      recog_data.n_operands = noperands = asm_noperands (body);
      if (noperands >= 0)
        {
          /* This insn is an `asm' with operands.  */

          /* expand_asm_operands makes sure there aren't too many operands.  */
          gcc_assert (noperands <= MAX_RECOG_OPERANDS);

          /* Now get the operand values and constraints out of the insn.  */
          decode_asm_operands (body, recog_data.operand,
                               recog_data.operand_loc,
                               recog_data.constraints,
                               recog_data.operand_mode, NULL);
          memset (recog_data.is_operator, 0, sizeof recog_data.is_operator);
          if (noperands > 0)
            {
              const char *p =  recog_data.constraints[0];
              recog_data.n_alternatives = 1;
              while (*p)
                recog_data.n_alternatives += (*p++ == ',');
            }
          recog_data.is_asm = true;
          break;
        }
      fatal_insn_not_found (insn);

    default:
    normal_insn:
      /* Ordinary insn: recognize it, get the operands via insn_extract
         and get the constraints.  */

      icode = recog_memoized (insn);
      if (icode < 0)
        fatal_insn_not_found (insn);

      recog_data.n_operands = noperands = insn_data[icode].n_operands;
      recog_data.n_alternatives = insn_data[icode].n_alternatives;
      recog_data.n_dups = insn_data[icode].n_dups;

      insn_extract (insn);

      for (i = 0; i < noperands; i++)
        {
          recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
          recog_data.is_operator[i] = insn_data[icode].operand[i].is_operator;
          recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
          /* VOIDmode match_operands get their mode from the real operand.  */
          if (recog_data.operand_mode[i] == VOIDmode)
            recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
        }
    }
  for (i = 0; i < noperands; i++)
    recog_data.operand_type[i]
      = (recog_data.constraints[i][0] == '=' ? OP_OUT
         : recog_data.constraints[i][0] == '+' ? OP_INOUT
         : OP_IN);

  gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);

  if (INSN_CODE (insn) < 0)
    for (i = 0; i < recog_data.n_alternatives; i++)
      recog_data.alternative_enabled_p[i] = true;
  else
    {
      recog_data.insn = insn;
      for (i = 0; i < recog_data.n_alternatives; i++)
        {
          which_alternative = i;
          recog_data.alternative_enabled_p[i] = get_attr_enabled (insn);
        }
    }

  recog_data.insn = NULL;
  which_alternative = -1;
}

/* After calling extract_insn, you can use this function to extract some
   information from the constraint strings into a more usable form.
   The collected data is stored in recog_op_alt.  */
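/* A sketch with a hypothetical constraint string: for an operand
   constrained by "r,m", this would leave recog_op_alt[i][0].cl as
   GENERAL_REGS and set recog_op_alt[i][1].memory_ok for the second
   alternative.  */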
void
preprocess_constraints (void)
{
  int i;

  for (i = 0; i < recog_data.n_operands; i++)
    memset (recog_op_alt[i], 0, (recog_data.n_alternatives
                                 * sizeof (struct operand_alternative)));

  for (i = 0; i < recog_data.n_operands; i++)
    {
      int j;
      struct operand_alternative *op_alt;
      const char *p = recog_data.constraints[i];

      op_alt = recog_op_alt[i];

      for (j = 0; j < recog_data.n_alternatives; j++)
        {
          op_alt[j].cl = NO_REGS;
          op_alt[j].constraint = p;
          op_alt[j].matches = -1;
          op_alt[j].matched = -1;

          if (!recog_data.alternative_enabled_p[j])
            {
              p = skip_alternative (p);
              continue;
            }

          if (*p == '\0' || *p == ',')
            {
              op_alt[j].anything_ok = 1;
              continue;
            }

          for (;;)
            {
              char c = *p;
              if (c == '#')
                do
                  c = *++p;
                while (c != ',' && c != '\0');
              if (c == ',' || c == '\0')
                {
                  p++;
                  break;
                }

              switch (c)
                {
                case '=': case '+': case '*': case '%':
                case 'E': case 'F': case 'G': case 'H':
                case 's': case 'i': case 'n':
                case 'I': case 'J': case 'K': case 'L':
                case 'M': case 'N': case 'O': case 'P':
                  /* These don't say anything we care about.  */
                  break;

                case '?':
                  op_alt[j].reject += 6;
                  break;
                case '!':
                  op_alt[j].reject += 600;
                  break;
                case '&':
                  op_alt[j].earlyclobber = 1;
                  break;

                case '0': case '1': case '2': case '3': case '4':
                case '5': case '6': case '7': case '8': case '9':
                  {
                    char *end;
                    op_alt[j].matches = strtoul (p, &end, 10);
                    recog_op_alt[op_alt[j].matches][j].matched = i;
                    p = end;
                  }
                  continue;

                case TARGET_MEM_CONSTRAINT:
                  op_alt[j].memory_ok = 1;
                  break;
                case '<':
                  op_alt[j].decmem_ok = 1;
                  break;
                case '>':
                  op_alt[j].incmem_ok = 1;
                  break;
                case 'V':
                  op_alt[j].nonoffmem_ok = 1;
                  break;
                case 'o':
                  op_alt[j].offmem_ok = 1;
                  break;
                case 'X':
                  op_alt[j].anything_ok = 1;
                  break;

                case 'p':
                  op_alt[j].is_address = 1;
                  op_alt[j].cl = reg_class_subunion[(int) op_alt[j].cl]
                      [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
                                             ADDRESS, SCRATCH)];
                  break;

                case 'g':
                case 'r':
                  op_alt[j].cl =
                   reg_class_subunion[(int) op_alt[j].cl][(int) GENERAL_REGS];
                  break;

                default:
                  if (EXTRA_MEMORY_CONSTRAINT (c, p))
                    {
                      op_alt[j].memory_ok = 1;
                      break;
                    }
                  if (EXTRA_ADDRESS_CONSTRAINT (c, p))
                    {
                      op_alt[j].is_address = 1;
                      op_alt[j].cl
                        = (reg_class_subunion
                           [(int) op_alt[j].cl]
                           [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
                                                  ADDRESS, SCRATCH)]);
                      break;
                    }

                  op_alt[j].cl
                    = (reg_class_subunion
                       [(int) op_alt[j].cl]
                       [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c, p)]);
                  break;
                }
              p += CONSTRAINT_LEN (c, p);
            }
        }
    }
}

/* Check the operands of an insn against the insn's operand constraints
   and return 1 if they are valid.
   The information about the insn's operands, constraints, operand modes
   etc. is obtained from the global variables set up by extract_insn.

   WHICH_ALTERNATIVE is set to a number which indicates which
   alternative of constraints was matched: 0 for the first alternative,
   1 for the next, etc.

   In addition, when two operands are required to match
   and it happens that the output operand is (reg) while the
   input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
   make the output operand look like the input.
   This is because the output operand is the one the template will print.

   This is used in final, just before printing the assembler code, and by
   the routines that determine an insn's attributes.

   If STRICT is a positive value, it means that we have been
   called after reload has been completed.  In that case, we must
   do all checks strictly.  If it is zero, it means that we have been called
   before reload has completed.  In that case, we first try to see if we can
   find an alternative that matches strictly.  If not, we try again, this
   time assuming that reload will fix up the insn.  This provides a "best
   guess" for the alternative and is used to compute attributes of insns prior
   to reload.  A negative value of STRICT is used for this internal call.  */
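/* As an illustration (hypothetical constraints): for a two-operand insn
   with constraints "=r,m" and "r,r", alternative 0 asks for a register
   output and a register input, while alternative 1 asks for a memory
   output and a register input; WHICH_ALTERNATIVE records whichever
   alternative matched.  */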

struct funny_match
{
  int this_op, other;
};

int
constrain_operands (int strict)
{
  const char *constraints[MAX_RECOG_OPERANDS];
  int matching_operands[MAX_RECOG_OPERANDS];
  int earlyclobber[MAX_RECOG_OPERANDS];
  int c;

  struct funny_match funny_match[MAX_RECOG_OPERANDS];
  int funny_match_index;

  which_alternative = 0;
  if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
    return 1;

  for (c = 0; c < recog_data.n_operands; c++)
    {
      constraints[c] = recog_data.constraints[c];
      matching_operands[c] = -1;
    }

  do
    {
      int seen_earlyclobber_at = -1;
      int opno;
      int lose = 0;
      funny_match_index = 0;

      if (!recog_data.alternative_enabled_p[which_alternative])
        {
          int i;

          for (i = 0; i < recog_data.n_operands; i++)
            constraints[i] = skip_alternative (constraints[i]);

          which_alternative++;
          continue;
        }

      for (opno = 0; opno < recog_data.n_operands; opno++)
        {
          rtx op = recog_data.operand[opno];
          enum machine_mode mode = GET_MODE (op);
          const char *p = constraints[opno];
          int offset = 0;
          int win = 0;
          int val;
          int len;

          earlyclobber[opno] = 0;

          /* A unary operator may be accepted by the predicate, but it
             is irrelevant for matching constraints.  */
          if (UNARY_P (op))
            op = XEXP (op, 0);

          if (GET_CODE (op) == SUBREG)
            {
              if (REG_P (SUBREG_REG (op))
                  && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
                offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
                                              GET_MODE (SUBREG_REG (op)),
                                              SUBREG_BYTE (op),
                                              GET_MODE (op));
              op = SUBREG_REG (op);
            }

          /* An empty constraint or empty alternative
             allows anything which matched the pattern.  */
          if (*p == 0 || *p == ',')
            win = 1;

          do
            switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
              {
              case '\0':
                len = 0;
                break;
              case ',':
                c = '\0';
                break;

              case '?':  case '!': case '*':  case '%':
              case '=':  case '+':
                break;

              case '#':
                /* Ignore rest of this alternative as far as
                   constraint checking is concerned.  */
                do
                  p++;
                while (*p && *p != ',');
                len = 0;
                break;

              case '&':
                earlyclobber[opno] = 1;
                if (seen_earlyclobber_at < 0)
                  seen_earlyclobber_at = opno;
                break;

              case '0':  case '1':  case '2':  case '3':  case '4':
              case '5':  case '6':  case '7':  case '8':  case '9':
                {
                  /* This operand must be the same as a previous one.
                     This kind of constraint is used for instructions such
                     as add when they take only two operands.

                     Note that the lower-numbered operand is passed first.

                     If we are not testing strictly, assume that this
                     constraint will be satisfied.  */

                  char *end;
                  int match;

                  match = strtoul (p, &end, 10);
                  p = end;

                  if (strict < 0)
                    val = 1;
                  else
                    {
                      rtx op1 = recog_data.operand[match];
                      rtx op2 = recog_data.operand[opno];

                      /* A unary operator may be accepted by the predicate,
                         but it is irrelevant for matching constraints.  */
                      if (UNARY_P (op1))
                        op1 = XEXP (op1, 0);
                      if (UNARY_P (op2))
                        op2 = XEXP (op2, 0);

                      val = operands_match_p (op1, op2);
                    }

                  matching_operands[opno] = match;
                  matching_operands[match] = opno;

                  if (val != 0)
                    win = 1;

                  /* If output is *x and input is *--x, arrange later
                     to change the output to *--x as well, since the
                     output op is the one that will be printed.  */
                  if (val == 2 && strict > 0)
                    {
                      funny_match[funny_match_index].this_op = opno;
                      funny_match[funny_match_index++].other = match;
                    }
                }
                len = 0;
                break;

              case 'p':
                /* p is used for address_operands.  When we are called by
                   gen_reload, no one will have checked that the address is
                   strictly valid, i.e., that all pseudos requiring hard regs
                   have gotten them.  */
                if (strict <= 0
                    || (strict_memory_address_p (recog_data.operand_mode[opno],
                                                 op)))
                  win = 1;
                break;

                /* No need to check general_operand again;
                   it was done in insn-recog.c.  Well, except that reload
                   doesn't check the validity of its replacements, but
                   that should only matter when there's a bug.  */
              case 'g':
                /* Anything goes unless it is a REG and really has a hard reg
                   but the hard reg is not in the class GENERAL_REGS.  */
                if (REG_P (op))
                  {
                    if (strict < 0
                        || GENERAL_REGS == ALL_REGS
                        || (reload_in_progress
                            && REGNO (op) >= FIRST_PSEUDO_REGISTER)
                        || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
                      win = 1;
                  }
                else if (strict < 0 || general_operand (op, mode))
                  win = 1;
                break;

              case 'X':
                /* This is used for a MATCH_SCRATCH in the cases when
                   we don't actually need anything.  So anything goes
                   any time.  */
                win = 1;
                break;

              case TARGET_MEM_CONSTRAINT:
                /* Memory operands must be valid, to the extent
                   required by STRICT.  */
                if (MEM_P (op))
                  {
                    if (strict > 0
                        && !strict_memory_address_addr_space_p
                             (GET_MODE (op), XEXP (op, 0),
                              MEM_ADDR_SPACE (op)))
                      break;
                    if (strict == 0
                        && !memory_address_addr_space_p
                             (GET_MODE (op), XEXP (op, 0),
                              MEM_ADDR_SPACE (op)))
                      break;
                    win = 1;
                  }
                /* Before reload, accept what reload can turn into mem.  */
                else if (strict < 0 && CONSTANT_P (op))
                  win = 1;
                /* During reload, accept a pseudo  */
                else if (reload_in_progress && REG_P (op)
                         && REGNO (op) >= FIRST_PSEUDO_REGISTER)
                  win = 1;
                break;

              case '<':
                if (MEM_P (op)
                    && (GET_CODE (XEXP (op, 0)) == PRE_DEC
                        || GET_CODE (XEXP (op, 0)) == POST_DEC))
                  win = 1;
                break;

              case '>':
                if (MEM_P (op)
                    && (GET_CODE (XEXP (op, 0)) == PRE_INC
                        || GET_CODE (XEXP (op, 0)) == POST_INC))
                  win = 1;
                break;

              case 'E':
              case 'F':
                if (GET_CODE (op) == CONST_DOUBLE
                    || (GET_CODE (op) == CONST_VECTOR
                        && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
                  win = 1;
                break;

              case 'G':
              case 'H':
                if (GET_CODE (op) == CONST_DOUBLE
                    && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, p))
                  win = 1;
                break;

              case 's':
                if (CONST_INT_P (op)
                    || (GET_CODE (op) == CONST_DOUBLE
                        && GET_MODE (op) == VOIDmode))
                  break;
              case 'i':
                if (CONSTANT_P (op))
                  win = 1;
                break;

              case 'n':
                if (CONST_INT_P (op)
                    || (GET_CODE (op) == CONST_DOUBLE
                        && GET_MODE (op) == VOIDmode))
                  win = 1;
                break;

              case 'I':
              case 'J':
              case 'K':
              case 'L':
              case 'M':
              case 'N':
              case 'O':
              case 'P':
                if (CONST_INT_P (op)
                    && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, p))
                  win = 1;
                break;

              case 'V':
                if (MEM_P (op)
                    && ((strict > 0 && ! offsettable_memref_p (op))
                        || (strict < 0
                            && !(CONSTANT_P (op) || MEM_P (op)))
                        || (reload_in_progress
                            && !(REG_P (op)
                                 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
                  win = 1;
                break;

              case 'o':
                if ((strict > 0 && offsettable_memref_p (op))
                    || (strict == 0 && offsettable_nonstrict_memref_p (op))
                    /* Before reload, accept what reload can handle.  */
                    || (strict < 0
                        && (CONSTANT_P (op) || MEM_P (op)))
                    /* During reload, accept a pseudo  */
                    || (reload_in_progress && REG_P (op)
                        && REGNO (op) >= FIRST_PSEUDO_REGISTER))
                  win = 1;
                break;

              default:
                {
                  enum reg_class cl;

                  cl = (c == 'r'
                           ? GENERAL_REGS : REG_CLASS_FROM_CONSTRAINT (c, p));
                  if (cl != NO_REGS)
                    {
                      if (strict < 0
                          || (strict == 0
                              && REG_P (op)
                              && REGNO (op) >= FIRST_PSEUDO_REGISTER)
                          || (strict == 0 && GET_CODE (op) == SCRATCH)
                          || (REG_P (op)
                              && reg_fits_class_p (op, cl, offset, mode)))
                        win = 1;
                    }
#ifdef EXTRA_CONSTRAINT_STR
                  else if (EXTRA_CONSTRAINT_STR (op, c, p))
                    win = 1;

                  else if (EXTRA_MEMORY_CONSTRAINT (c, p)
                           /* Every memory operand can be reloaded to fit.  */
                           && ((strict < 0 && MEM_P (op))
                               /* Before reload, accept what reload can turn
                                  into mem.  */
                               || (strict < 0 && CONSTANT_P (op))
                               /* During reload, accept a pseudo  */
                               || (reload_in_progress && REG_P (op)
                                   && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
                    win = 1;
                  else if (EXTRA_ADDRESS_CONSTRAINT (c, p)
                           /* Every address operand can be reloaded to fit.  */
                           && strict < 0)
                    win = 1;
#endif
                  break;
                }
              }
          while (p += len, c);

          constraints[opno] = p;
          /* If this operand did not win somehow,
             this alternative loses.  */
          if (! win)
            lose = 1;
        }
      /* This alternative won; the operands are ok.
         Change whichever operands this alternative says to change.  */
      if (! lose)
        {
          int opno, eopno;

          /* See if any earlyclobber operand conflicts with some other
             operand.  */

          if (strict > 0  && seen_earlyclobber_at >= 0)
            for (eopno = seen_earlyclobber_at;
                 eopno < recog_data.n_operands;
                 eopno++)
              /* Ignore earlyclobber operands now in memory,
                 because we would often report failure when we have
                 two memory operands, one of which was formerly a REG.  */
              if (earlyclobber[eopno]
                  && REG_P (recog_data.operand[eopno]))
                for (opno = 0; opno < recog_data.n_operands; opno++)
                  if ((MEM_P (recog_data.operand[opno])
                       || recog_data.operand_type[opno] != OP_OUT)
                      && opno != eopno
                      /* Ignore things like match_operator operands.  */
                      && *recog_data.constraints[opno] != 0
                      && ! (matching_operands[opno] == eopno
                            && operands_match_p (recog_data.operand[opno],
                                                 recog_data.operand[eopno]))
                      && ! safe_from_earlyclobber (recog_data.operand[opno],
                                                   recog_data.operand[eopno]))
                    lose = 1;

          if (! lose)
            {
              while (--funny_match_index >= 0)
                {
                  recog_data.operand[funny_match[funny_match_index].other]
                    = recog_data.operand[funny_match[funny_match_index].this_op];
                }

#ifdef AUTO_INC_DEC
              /* For operands without < or > constraints reject side-effects.  */
              if (recog_data.is_asm)
                {
                  for (opno = 0; opno < recog_data.n_operands; opno++)
                    if (MEM_P (recog_data.operand[opno]))
                      switch (GET_CODE (XEXP (recog_data.operand[opno], 0)))
                        {
                        case PRE_INC:
                        case POST_INC:
                        case PRE_DEC:
                        case POST_DEC:
                        case PRE_MODIFY:
                        case POST_MODIFY:
                          if (strchr (recog_data.constraints[opno], '<') == NULL
                              && strchr (recog_data.constraints[opno], '>')
                                 == NULL)
                            return 0;
                          break;
                        default:
                          break;
                        }
                }
#endif
              return 1;
            }
        }

      which_alternative++;
    }
  while (which_alternative < recog_data.n_alternatives);

  which_alternative = -1;
  /* If we are about to reject this, but we are not to test strictly,
     try a very loose test.  Only return failure if it fails also.  */
  if (strict == 0)
    return constrain_operands (-1);
  else
    return 0;
}

/* Return true iff OPERAND (assumed to be a REG rtx)
   is a hard reg in class CLASS when its regno is offset by OFFSET
   and changed to mode MODE.
   If REG occupies multiple hard regs, all of them must be in CLASS.  */
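/* E.g. (register numbers illustrative) if OPERAND is (reg:DI 3) and
   DImode occupies two hard registers, both regs 3 + OFFSET and
   4 + OFFSET must be in CLASS for the result to be true.  */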

bool
reg_fits_class_p (const_rtx operand, reg_class_t cl, int offset,
                  enum machine_mode mode)
{
  int regno = REGNO (operand);

  if (cl == NO_REGS)
    return false;

  return (HARD_REGISTER_NUM_P (regno)
          && in_hard_reg_set_p (reg_class_contents[(int) cl],
                                mode, regno + offset));
}

/* Split single instruction.  Helper function for split_all_insns and
   split_all_insns_noflow.  Return last insn in the sequence if successful,
   or NULL if unsuccessful.  */
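/* E.g. (illustrative values): if INSN was a single set of (reg 70)
   carrying a REG_EQUAL note for (const_int 42), and the last insn of the
   split sequence sets the same register, an equivalent REG_EQUAL note is
   attached to that last insn.  */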

static rtx
split_insn (rtx insn)
{
  /* Split insns here to get max fine-grain parallelism.  */
  rtx first = PREV_INSN (insn);
  rtx last = try_split (PATTERN (insn), insn, 1);
  rtx insn_set, last_set, note;

  if (last == insn)
    return NULL_RTX;

  /* If the original instruction was a single set that was known to be
     equivalent to a constant, see if we can say the same about the last
     instruction in the split sequence.  The two instructions must set
     the same destination.  */
  insn_set = single_set (insn);
  if (insn_set)
    {
      last_set = single_set (last);
      if (last_set && rtx_equal_p (SET_DEST (last_set), SET_DEST (insn_set)))
        {
          note = find_reg_equal_equiv_note (insn);
          if (note && CONSTANT_P (XEXP (note, 0)))
            set_unique_reg_note (last, REG_EQUAL, XEXP (note, 0));
          else if (CONSTANT_P (SET_SRC (insn_set)))
            set_unique_reg_note (last, REG_EQUAL, SET_SRC (insn_set));
        }
    }

  /* try_split returns the NOTE that INSN became.  */
  SET_INSN_DELETED (insn);

  /* ??? Coddle to md files that generate subregs in post-reload
     splitters instead of computing the proper hard register.  */
  if (reload_completed && first != last)
    {
      first = NEXT_INSN (first);
      for (;;)
        {
          if (INSN_P (first))
            cleanup_subreg_operands (first);
          if (first == last)
            break;
          first = NEXT_INSN (first);
        }
    }

  return last;
}

/* Split all insns in the function.  */

void
split_all_insns (void)
{
  sbitmap blocks;
  bool changed;
  basic_block bb;

  blocks = sbitmap_alloc (last_basic_block);
  sbitmap_zero (blocks);
  changed = false;

  FOR_EACH_BB_REVERSE (bb)
    {
      rtx insn, next;
      bool finish = false;

      rtl_profile_for_bb (bb);
      for (insn = BB_HEAD (bb); !finish ; insn = next)
        {
          /* Can't use `next_real_insn' because that might go across
             CODE_LABELS and short-out basic blocks.  */
          next = NEXT_INSN (insn);
          finish = (insn == BB_END (bb));
          if (INSN_P (insn))
            {
              rtx set = single_set (insn);

              /* Don't split no-op move insns.  These should silently
                 disappear later in final.  Splitting such insns would
                 break the code that handles LIBCALL blocks.  */
              if (set && set_noop_p (set))
                {
                  /* Nops get in the way while scheduling, so delete them
                     now if register allocation has already been done.  It
                     is too risky to try to do this before register
                     allocation, and there are unlikely to be very many
                     nops then anyways.  */
                  if (reload_completed)
                    delete_insn_and_edges (insn);
                }
              else
                {
                  if (split_insn (insn))
                    {
                      SET_BIT (blocks, bb->index);
                      changed = true;
                    }
                }
            }
        }
    }

  default_rtl_profile ();
  if (changed)
    find_many_sub_basic_blocks (blocks);

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  sbitmap_free (blocks);
}

/* Same as split_all_insns, but do not expect CFG to be available.
   Used by machine dependent reorg passes.  */

unsigned int
split_all_insns_noflow (void)
{
  rtx next, insn;

  for (insn = get_insns (); insn; insn = next)
    {
      next = NEXT_INSN (insn);
      if (INSN_P (insn))
        {
          /* Don't split no-op move insns.  These should silently
             disappear later in final.  Splitting such insns would
             break the code that handles LIBCALL blocks.  */
          rtx set = single_set (insn);
          if (set && set_noop_p (set))
            {
              /* Nops get in the way while scheduling, so delete them
                 now if register allocation has already been done.  It
                 is too risky to try to do this before register
                 allocation, and there are unlikely to be very many
                 nops then anyways.

                 ??? Should we use delete_insn when the CFG isn't valid?  */
              if (reload_completed)
                delete_insn_and_edges (insn);
            }
          else
            split_insn (insn);
        }
    }
  return 0;
}

#ifdef HAVE_peephole2
struct peep2_insn_data
{
  rtx insn;
  regset live_before;
};

static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
static int peep2_current;

static bool peep2_do_rebuild_jump_labels;
static bool peep2_do_cleanup_cfg;

/* The number of instructions available to match a peep2.  */
int peep2_current_count;

/* A non-insn marker indicating the last insn of the block.
   The live_before regset for this element is correct, indicating
   DF_LIVE_OUT for the block.  */
#define PEEP2_EOB       pc_rtx

/* Wrap N to fit into the peep2_insn_data buffer.  */
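/* E.g. were MAX_INSNS_PER_PEEP2 25 (the actual bound is generated per
   target; 25 is only illustrative), the buffer would hold 26 entries and
   peep2_buf_position (27) would wrap around to 1.  */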

static int
peep2_buf_position (int n)
{
  if (n >= MAX_INSNS_PER_PEEP2 + 1)
    n -= MAX_INSNS_PER_PEEP2 + 1;
  return n;
}

/* Return the Nth non-note insn after `current', or return NULL_RTX if it
   does not exist.  Used by the recognizer to find the next insn to match
   in a multi-insn pattern.  */

rtx
peep2_next_insn (int n)
{
  gcc_assert (n <= peep2_current_count);

  n = peep2_buf_position (peep2_current + n);

  return peep2_insn_data[n].insn;
}

/* Return true if REGNO is dead before the Nth non-note insn
   after `current'.  */

int
peep2_regno_dead_p (int ofs, int regno)
{
  gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);

  ofs = peep2_buf_position (peep2_current + ofs);

  gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);

  return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
}

/* Similarly for a REG.  */

int
peep2_reg_dead_p (int ofs, rtx reg)
{
  int regno, n;

  gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);

  ofs = peep2_buf_position (peep2_current + ofs);

  gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);

  regno = REGNO (reg);
  n = hard_regno_nregs[regno][GET_MODE (reg)];
  while (--n >= 0)
    if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
      return 0;
  return 1;
}

/* Try to find a hard register of mode MODE, matching the register class in
   CLASS_STR, which is available from the beginning of the instruction at
   peephole window offset FROM and remains available until the end of the
   instruction at offset TO.
   Registers that already have bits set in REG_SET will not be considered.

   If an appropriate register is available, it will be returned and the
   corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
   returned.  */
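/* A typical (hypothetical) call from a target's peephole2 condition:

     peep2_find_free_register (0, 1, "r", SImode, &regs_used)

   asks for a general register that is free from the start of the first
   insn of the window through the second; regs_used is a HARD_REG_SET
   assumed for the example.  */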

rtx
peep2_find_free_register (int from, int to, const char *class_str,
                          enum machine_mode mode, HARD_REG_SET *reg_set)
{
  static int search_ofs;
  enum reg_class cl;
  HARD_REG_SET live;
  df_ref *def_rec;
  int i;

  gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
  gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);

  from = peep2_buf_position (peep2_current + from);
  to = peep2_buf_position (peep2_current + to);

  gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
  REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);

  while (from != to)
    {
      gcc_assert (peep2_insn_data[from].insn != NULL_RTX);

      /* Don't use registers set or clobbered by the insn.  */
      for (def_rec = DF_INSN_DEFS (peep2_insn_data[from].insn);
           *def_rec; def_rec++)
        SET_HARD_REG_BIT (live, DF_REF_REGNO (*def_rec));

      from = peep2_buf_position (from + 1);
    }

  cl = (class_str[0] == 'r' ? GENERAL_REGS
           : REG_CLASS_FROM_CONSTRAINT (class_str[0], class_str));

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int raw_regno, regno, success, j;

      /* Distribute the free registers as much as possible.  */
      raw_regno = search_ofs + i;
      if (raw_regno >= FIRST_PSEUDO_REGISTER)
        raw_regno -= FIRST_PSEUDO_REGISTER;
#ifdef REG_ALLOC_ORDER
      regno = reg_alloc_order[raw_regno];
#else
      regno = raw_regno;
#endif

      /* Don't allocate fixed registers.  */
      if (fixed_regs[regno])
        continue;
      /* Don't allocate global registers.  */
      if (global_regs[regno])
        continue;
      /* Make sure the register is of the right class.  */
      if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno))
        continue;
      /* And can support the mode we need.  */
      if (! HARD_REGNO_MODE_OK (regno, mode))
        continue;
      /* And that we don't create an extra save/restore.  */
      if (! call_used_regs[regno] && ! df_regs_ever_live_p (regno))
        continue;
      if (! targetm.hard_regno_scratch_ok (regno))
        continue;

      /* And we don't clobber traceback for noreturn functions.  */
      if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
          && (! reload_completed || frame_pointer_needed))
        continue;

      success = 1;
      for (j = hard_regno_nregs[regno][mode] - 1; j >= 0; j--)
        {
          if (TEST_HARD_REG_BIT (*reg_set, regno + j)
              || TEST_HARD_REG_BIT (live, regno + j))
            {
              success = 0;
              break;
            }
        }
      if (success)
        {
          add_to_hard_reg_set (reg_set, mode, regno);

          /* Start the next search with the next register.  */
          if (++raw_regno >= FIRST_PSEUDO_REGISTER)
            raw_regno = 0;
          search_ofs = raw_regno;

          return gen_rtx_REG (mode, regno);
        }
    }

  search_ofs = 0;
  return NULL_RTX;
}

/* Forget all currently tracked instructions, only remember current
   LIVE regset.  */

static void
peep2_reinit_state (regset live)
{
  int i;

  /* Indicate that all slots except the last hold invalid data.  */
3141
  for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3142
    peep2_insn_data[i].insn = NULL_RTX;
3143
  peep2_current_count = 0;
3144
 
3145
  /* Indicate that the last slot contains live_after data.  */
3146
  peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3147
  peep2_current = MAX_INSNS_PER_PEEP2;
3148
 
3149
  COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3150
}

/* While scanning basic block BB, we found a match of length MATCH_LEN,
   starting at INSN.  Perform the replacement, removing the old insns and
   replacing them with ATTEMPT.  Returns the last insn emitted, or NULL
   if the replacement is rejected.  */

static rtx
peep2_attempt (basic_block bb, rtx insn, int match_len, rtx attempt)
{
  int i;
  rtx last, eh_note, as_note, before_try, x;
  rtx old_insn, new_insn;
  bool was_call = false;

  /* If we are splitting an RTX_FRAME_RELATED_P insn, do not allow it to
     match more than one insn, or to be split into more than one insn.  */
  old_insn = peep2_insn_data[peep2_current].insn;
  if (RTX_FRAME_RELATED_P (old_insn))
    {
      bool any_note = false;
      rtx note;

      if (match_len != 0)
        return NULL;

      /* Look for one "active" insn.  I.e. ignore any "clobber" insns that
         may be in the stream for the purpose of register allocation.  */
      if (active_insn_p (attempt))
        new_insn = attempt;
      else
        new_insn = next_active_insn (attempt);
      if (next_active_insn (new_insn))
        return NULL;

      /* We have a 1-1 replacement.  Copy over any frame-related info.  */
      RTX_FRAME_RELATED_P (new_insn) = 1;

      /* Allow the backend to fill in a note during the split.  */
      for (note = REG_NOTES (new_insn); note ; note = XEXP (note, 1))
        switch (REG_NOTE_KIND (note))
          {
          case REG_FRAME_RELATED_EXPR:
          case REG_CFA_DEF_CFA:
          case REG_CFA_ADJUST_CFA:
          case REG_CFA_OFFSET:
          case REG_CFA_REGISTER:
          case REG_CFA_EXPRESSION:
          case REG_CFA_RESTORE:
          case REG_CFA_SET_VDRAP:
            any_note = true;
            break;
          default:
            break;
          }

      /* If the backend didn't supply a note, copy one over.  */
      if (!any_note)
        for (note = REG_NOTES (old_insn); note ; note = XEXP (note, 1))
          switch (REG_NOTE_KIND (note))
            {
            case REG_FRAME_RELATED_EXPR:
            case REG_CFA_DEF_CFA:
            case REG_CFA_ADJUST_CFA:
            case REG_CFA_OFFSET:
            case REG_CFA_REGISTER:
            case REG_CFA_EXPRESSION:
            case REG_CFA_RESTORE:
            case REG_CFA_SET_VDRAP:
              add_reg_note (new_insn, REG_NOTE_KIND (note), XEXP (note, 0));
              any_note = true;
              break;
            default:
              break;
            }

      /* If there still isn't a note, make sure the unwind info sees the
         same expression as before the split.  */
      if (!any_note)
        {
          rtx old_set, new_set;

          /* The old insn had better have been simple, or annotated.  */
          old_set = single_set (old_insn);
          gcc_assert (old_set != NULL);

          new_set = single_set (new_insn);
          if (!new_set || !rtx_equal_p (new_set, old_set))
            add_reg_note (new_insn, REG_FRAME_RELATED_EXPR, old_set);
        }

      /* Copy prologue/epilogue status.  This is required in order to keep
         proper placement of EPILOGUE_BEG and the DW_CFA_remember_state.  */
      maybe_copy_prologue_epilogue_insn (old_insn, new_insn);
    }

  /* If we are splitting a CALL_INSN, look for the CALL_INSN
     in ATTEMPT and copy our CALL_INSN_FUNCTION_USAGE and other
     cfg-related call notes.  */
  for (i = 0; i <= match_len; ++i)
    {
      int j;
      rtx note;

      j = peep2_buf_position (peep2_current + i);
      old_insn = peep2_insn_data[j].insn;
      if (!CALL_P (old_insn))
        continue;
      was_call = true;

      new_insn = attempt;
      while (new_insn != NULL_RTX)
        {
          if (CALL_P (new_insn))
            break;
          new_insn = NEXT_INSN (new_insn);
        }

      gcc_assert (new_insn != NULL_RTX);

      CALL_INSN_FUNCTION_USAGE (new_insn)
        = CALL_INSN_FUNCTION_USAGE (old_insn);

      for (note = REG_NOTES (old_insn);
           note;
           note = XEXP (note, 1))
        switch (REG_NOTE_KIND (note))
          {
          case REG_NORETURN:
          case REG_SETJMP:
          case REG_TM:
            add_reg_note (new_insn, REG_NOTE_KIND (note),
                          XEXP (note, 0));
            break;
          default:
            /* Discard all other reg notes.  */
            break;
          }

      /* Croak if there is another call in the sequence.  */
      while (++i <= match_len)
        {
          j = peep2_buf_position (peep2_current + i);
          old_insn = peep2_insn_data[j].insn;
          gcc_assert (!CALL_P (old_insn));
        }
      break;
    }

  /* If we matched any instruction that had a REG_ARGS_SIZE, then
     move those notes over to the new sequence.  */
  as_note = NULL;
  for (i = match_len; i >= 0; --i)
    {
      int j = peep2_buf_position (peep2_current + i);
      old_insn = peep2_insn_data[j].insn;

      as_note = find_reg_note (old_insn, REG_ARGS_SIZE, NULL);
      if (as_note)
        break;
    }

  i = peep2_buf_position (peep2_current + match_len);
  eh_note = find_reg_note (peep2_insn_data[i].insn, REG_EH_REGION, NULL_RTX);

  /* Replace the old sequence with the new.  */
  last = emit_insn_after_setloc (attempt,
                                 peep2_insn_data[i].insn,
                                 INSN_LOCATOR (peep2_insn_data[i].insn));
  before_try = PREV_INSN (insn);
  delete_insn_chain (insn, peep2_insn_data[i].insn, false);

  /* Re-insert the EH_REGION notes.  */
  if (eh_note || (was_call && nonlocal_goto_handler_labels))
    {
      edge eh_edge;
      edge_iterator ei;

      FOR_EACH_EDGE (eh_edge, ei, bb->succs)
        if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
          break;

      if (eh_note)
        copy_reg_eh_region_note_backward (eh_note, last, before_try);

      if (eh_edge)
        for (x = last; x != before_try; x = PREV_INSN (x))
          if (x != BB_END (bb)
              && (can_throw_internal (x)
                  || can_nonlocal_goto (x)))
            {
              edge nfte, nehe;
              int flags;

              nfte = split_block (bb, x);
              flags = (eh_edge->flags
                       & (EDGE_EH | EDGE_ABNORMAL));
              if (CALL_P (x))
                flags |= EDGE_ABNORMAL_CALL;
              nehe = make_edge (nfte->src, eh_edge->dest,
                                flags);

              nehe->probability = eh_edge->probability;
              nfte->probability
                = REG_BR_PROB_BASE - nehe->probability;

              peep2_do_cleanup_cfg |= purge_dead_edges (nfte->dest);
              bb = nfte->src;
              eh_edge = nehe;
            }

      /* A possibly trapping insn may have been turned into a
         non-trapping one.  Zap any dummy outgoing edges.  */
      peep2_do_cleanup_cfg |= purge_dead_edges (bb);
    }

  /* Re-insert the ARGS_SIZE notes.  */
  if (as_note)
    fixup_args_size_notes (before_try, last, INTVAL (XEXP (as_note, 0)));

  /* If we generated a jump instruction, it won't have
     JUMP_LABEL set.  Recompute after we're done.  */
  for (x = last; x != before_try; x = PREV_INSN (x))
    if (JUMP_P (x))
      {
        peep2_do_rebuild_jump_labels = true;
        break;
      }

  return last;
}

/* After performing a replacement in basic block BB, fix up the life
   information in our buffer.  LAST is the last of the insns that we
   emitted as a replacement.  PREV is the insn before the start of
   the replacement.  MATCH_LEN is the number of instructions that were
   matched, and which now need to be replaced in the buffer.  */

static void
peep2_update_life (basic_block bb, int match_len, rtx last, rtx prev)
{
  int i = peep2_buf_position (peep2_current + match_len + 1);
  rtx x;
  regset_head live;

  INIT_REG_SET (&live);
  COPY_REG_SET (&live, peep2_insn_data[i].live_before);

  gcc_assert (peep2_current_count >= match_len + 1);
  peep2_current_count -= match_len + 1;

  x = last;
  do
    {
      if (INSN_P (x))
        {
          df_insn_rescan (x);
          if (peep2_current_count < MAX_INSNS_PER_PEEP2)
            {
              peep2_current_count++;
              if (--i < 0)
                i = MAX_INSNS_PER_PEEP2;
              peep2_insn_data[i].insn = x;
              df_simulate_one_insn_backwards (bb, x, &live);
              COPY_REG_SET (peep2_insn_data[i].live_before, &live);
            }
        }
      x = PREV_INSN (x);
    }
  while (x != prev);
  CLEAR_REG_SET (&live);

  peep2_current = i;
}
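
/* Note the complementary simulation directions: peep2_fill_buffer
   below records an insn's live_before set and then steps the live set
   forwards over it, whereas peep2_update_life above starts from the
   live-after set of the replaced region and steps backwards over the
   newly emitted insns to reconstruct live_before for each slot.  */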

/* Add INSN, which is in BB, at the end of the peep2 insn buffer if possible.
   Return true if we added it, false otherwise.  The caller will try to match
   peepholes against the buffer if we return false; otherwise it will try to
   add more instructions to the buffer.  */

static bool
peep2_fill_buffer (basic_block bb, rtx insn, regset live)
{
  int pos;

  /* Once we have filled the maximum number of insns the buffer can hold,
     allow the caller to match the insns against peepholes.  We wait until
     the buffer is full in case the target has similar peepholes of different
     length; we always want to match the longest if possible.  */
  if (peep2_current_count == MAX_INSNS_PER_PEEP2)
    return false;

  /* If an insn has RTX_FRAME_RELATED_P set, do not allow it to be matched with
     any other pattern, lest it change the semantics of the frame info.  */
  if (RTX_FRAME_RELATED_P (insn))
    {
      /* Let the buffer drain first.  */
      if (peep2_current_count > 0)
        return false;
      /* Now the insn will be the only thing in the buffer.  */
    }

  pos = peep2_buf_position (peep2_current + peep2_current_count);
  peep2_insn_data[pos].insn = insn;
  COPY_REG_SET (peep2_insn_data[pos].live_before, live);
  peep2_current_count++;

  df_simulate_one_insn_forwards (bb, insn, live);
  return true;
}

/* Perform the peephole2 optimization pass.  */

static void
peephole2_optimize (void)
{
  rtx insn;
  bitmap live;
  int i;
  basic_block bb;

  peep2_do_cleanup_cfg = false;
  peep2_do_rebuild_jump_labels = false;

  df_set_flags (DF_LR_RUN_DCE);
  df_note_add_problem ();
  df_analyze ();

  /* Initialize the regsets we're going to use.  */
  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
    peep2_insn_data[i].live_before = BITMAP_ALLOC (&reg_obstack);
  live = BITMAP_ALLOC (&reg_obstack);

  FOR_EACH_BB_REVERSE (bb)
    {
      bool past_end = false;
      int pos;

      rtl_profile_for_bb (bb);

      /* Start up propagation.  */
      bitmap_copy (live, DF_LR_IN (bb));
      df_simulate_initialize_forwards (bb, live);
      peep2_reinit_state (live);

      insn = BB_HEAD (bb);
      for (;;)
        {
          rtx attempt, head;
          int match_len;

          if (!past_end && !NONDEBUG_INSN_P (insn))
            {
            next_insn:
              insn = NEXT_INSN (insn);
              if (insn == NEXT_INSN (BB_END (bb)))
                past_end = true;
              continue;
            }
          if (!past_end && peep2_fill_buffer (bb, insn, live))
            goto next_insn;

          /* If we did not fill an empty buffer, it signals the end of the
             block.  */
          if (peep2_current_count == 0)
            break;

          /* The buffer filled to the current maximum, so try to match.  */

          pos = peep2_buf_position (peep2_current + peep2_current_count);
          peep2_insn_data[pos].insn = PEEP2_EOB;
          COPY_REG_SET (peep2_insn_data[pos].live_before, live);

          /* Match the peephole.  */
          head = peep2_insn_data[peep2_current].insn;
          attempt = peephole2_insns (PATTERN (head), head, &match_len);
          if (attempt != NULL)
            {
              rtx last = peep2_attempt (bb, head, match_len, attempt);
              if (last)
                {
                  peep2_update_life (bb, match_len, last, PREV_INSN (attempt));
                  continue;
                }
            }

          /* No match: advance the buffer by one insn.  */
          peep2_current = peep2_buf_position (peep2_current + 1);
          peep2_current_count--;
        }
    }

  default_rtl_profile ();
  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
    BITMAP_FREE (peep2_insn_data[i].live_before);
  BITMAP_FREE (live);
  if (peep2_do_rebuild_jump_labels)
    rebuild_jump_labels (get_insns ());
}
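
/* The peephole2_insns routine called above is generated from the
   define_peephole2 constructs in the target's machine description.
   A hypothetical example, folding a register copy into a following
   add (predicates and operand numbers are illustrative only):

     (define_peephole2
       [(set (match_operand:SI 0 "register_operand" "")
             (match_operand:SI 1 "register_operand" ""))
        (set (match_dup 0)
             (plus:SI (match_dup 0)
                      (match_operand:SI 2 "const_int_operand" "")))]
       ""
       [(set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])

   On a match, peephole2_insns emits the replacement sequence and sets
   *MATCH_LEN to the number of insns matched beyond the first (1 in
   this example).  */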
#endif /* HAVE_peephole2 */

/* Common predicates for use with define_bypass.  */

/* True if the dependency between OUT_INSN and IN_INSN is on the store
   data, not the address operand(s) of the store.  IN_INSN and OUT_INSN
   must be either a single_set or a PARALLEL with SETs inside.  */
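
/* For example (a hypothetical illustration; register numbers are
   arbitrary):

     OUT_INSN: (set (reg:SI 3) (plus:SI (reg:SI 4) (reg:SI 5)))
     IN_INSN:  (set (mem:SI (reg:SI 6)) (reg:SI 3))

   qualifies, since reg 3 feeds only the stored data.  Were IN_INSN
   instead

     (set (mem:SI (reg:SI 3)) (reg:SI 6))

   the dependency would be on the address, and the predicate would
   return false.  */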

int
store_data_bypass_p (rtx out_insn, rtx in_insn)
{
  rtx out_set, in_set;
  rtx out_pat, in_pat;
  rtx out_exp, in_exp;
  int i, j;

  in_set = single_set (in_insn);
  if (in_set)
    {
      if (!MEM_P (SET_DEST (in_set)))
        return false;

      out_set = single_set (out_insn);
      if (out_set)
        {
          if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
            return false;
        }
      else
        {
          out_pat = PATTERN (out_insn);

          if (GET_CODE (out_pat) != PARALLEL)
            return false;

          for (i = 0; i < XVECLEN (out_pat, 0); i++)
            {
              out_exp = XVECEXP (out_pat, 0, i);

              if (GET_CODE (out_exp) == CLOBBER)
                continue;

              gcc_assert (GET_CODE (out_exp) == SET);

              if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_set)))
                return false;
            }
        }
    }
  else
    {
      in_pat = PATTERN (in_insn);
      gcc_assert (GET_CODE (in_pat) == PARALLEL);

      for (i = 0; i < XVECLEN (in_pat, 0); i++)
        {
          in_exp = XVECEXP (in_pat, 0, i);

          if (GET_CODE (in_exp) == CLOBBER)
            continue;

          gcc_assert (GET_CODE (in_exp) == SET);

          if (!MEM_P (SET_DEST (in_exp)))
            return false;

          out_set = single_set (out_insn);
          if (out_set)
            {
              if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_exp)))
                return false;
            }
          else
            {
              out_pat = PATTERN (out_insn);
              gcc_assert (GET_CODE (out_pat) == PARALLEL);

              for (j = 0; j < XVECLEN (out_pat, 0); j++)
                {
                  out_exp = XVECEXP (out_pat, 0, j);

                  if (GET_CODE (out_exp) == CLOBBER)
                    continue;

                  gcc_assert (GET_CODE (out_exp) == SET);

                  if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_exp)))
                    return false;
                }
            }
        }
    }

  return true;
}

/* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
   condition, and not the THEN or ELSE branch.  OUT_INSN may be either a single
   or multiple set; IN_INSN should be single_set for truth, but for convenience
   of insn categorization may be any JUMP or CALL insn.  */
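
/* For example (a hypothetical conditional branch; register and label
   numbers are arbitrary):

     OUT_INSN: (set (reg:CC 17) (compare:CC (reg:SI 3) (const_int 0)))
     IN_INSN:  (set (pc) (if_then_else (ne (reg:CC 17) (const_int 0))
                                       (label_ref 23)
                                       (pc)))

   qualifies: reg 17 appears only in the IF_THEN_ELSE condition, not
   in the THEN or ELSE arms.  */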

int
if_test_bypass_p (rtx out_insn, rtx in_insn)
{
  rtx out_set, in_set;

  in_set = single_set (in_insn);
  if (! in_set)
    {
      gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
      return false;
    }

  if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
    return false;
  in_set = SET_SRC (in_set);

  out_set = single_set (out_insn);
  if (out_set)
    {
      if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
          || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
        return false;
    }
  else
    {
      rtx out_pat;
      int i;

      out_pat = PATTERN (out_insn);
      gcc_assert (GET_CODE (out_pat) == PARALLEL);

      for (i = 0; i < XVECLEN (out_pat, 0); i++)
        {
          rtx exp = XVECEXP (out_pat, 0, i);

          if (GET_CODE (exp) == CLOBBER)
            continue;

          gcc_assert (GET_CODE (exp) == SET);

          if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
              || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
            return false;
        }
    }

  return true;
}
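
/* Run the peephole2 pass only when optimizing and -fpeephole2 is in
   effect (it is enabled by default at -O2 and above).  */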
static bool
gate_handle_peephole2 (void)
{
  return (optimize > 0 && flag_peephole2);
}

static unsigned int
rest_of_handle_peephole2 (void)
{
#ifdef HAVE_peephole2
  peephole2_optimize ();
#endif
  return 0;
}

struct rtl_opt_pass pass_peephole2 =
{
 {
  RTL_PASS,
  "peephole2",                          /* name */
  gate_handle_peephole2,                /* gate */
  rest_of_handle_peephole2,             /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_PEEPHOLE2,                         /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  0                                     /* todo_flags_finish */
 }
};

static unsigned int
rest_of_handle_split_all_insns (void)
{
  split_all_insns ();
  return 0;
}

struct rtl_opt_pass pass_split_all_insns =
{
 {
  RTL_PASS,
  "split1",                             /* name */
  NULL,                                 /* gate */
  rest_of_handle_split_all_insns,       /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};

static unsigned int
rest_of_handle_split_after_reload (void)
{
  /* If optimizing, then go ahead and split insns now.  */
#ifndef STACK_REGS
  if (optimize > 0)
#endif
    split_all_insns ();
  return 0;
}

struct rtl_opt_pass pass_split_after_reload =
{
 {
  RTL_PASS,
  "split2",                             /* name */
  NULL,                                 /* gate */
  rest_of_handle_split_after_reload,    /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};

static bool
gate_handle_split_before_regstack (void)
{
#if defined (HAVE_ATTR_length) && defined (STACK_REGS)
  /* If flow2 creates new instructions which need splitting, and
     scheduling after reload is not done, they might not be split
     until final, which does not allow splitting when HAVE_ATTR_length
     is defined.  */
# ifdef INSN_SCHEDULING
  return (optimize && !flag_schedule_insns_after_reload);
# else
  return (optimize);
# endif
#else
  return 0;
#endif
}

static unsigned int
rest_of_handle_split_before_regstack (void)
{
  split_all_insns ();
  return 0;
}

struct rtl_opt_pass pass_split_before_regstack =
{
 {
  RTL_PASS,
  "split3",                             /* name */
  gate_handle_split_before_regstack,    /* gate */
  rest_of_handle_split_before_regstack, /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};

static bool
gate_handle_split_before_sched2 (void)
{
#ifdef INSN_SCHEDULING
  return optimize > 0 && flag_schedule_insns_after_reload;
#else
  return 0;
#endif
}

static unsigned int
rest_of_handle_split_before_sched2 (void)
{
#ifdef INSN_SCHEDULING
  split_all_insns ();
#endif
  return 0;
}

struct rtl_opt_pass pass_split_before_sched2 =
{
 {
  RTL_PASS,
  "split4",                             /* name */
  gate_handle_split_before_sched2,      /* gate */
  rest_of_handle_split_before_sched2,   /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_verify_flow                      /* todo_flags_finish */
 }
};

/* The placement of the splitting that we do for shorten_branches
   depends on whether regstack is used by the target or not.  */
static bool
gate_do_final_split (void)
{
#if defined (HAVE_ATTR_length) && !defined (STACK_REGS)
  return 1;
#else
  return 0;
#endif
}

struct rtl_opt_pass pass_split_for_shorten_branches =
{
 {
  RTL_PASS,
  "split5",                             /* name */
  gate_do_final_split,                  /* gate */
  split_all_insns_noflow,               /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_verify_rtl_sharing               /* todo_flags_finish */
 }
};
