/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "hard-reg-set.h"
#include "recog.h"
#include "regs.h"
#include "expr.h"
#include "function.h"
#include "flags.h"
#include "real.h"
#include "toplev.h"
#include "basic-block.h"
#include "output.h"
#include "reload.h"
#include "timevar.h"
#include "tree-pass.h"

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif

static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx);
static rtx *find_single_use_1 (rtx, rtx *);
static void validate_replace_src_1 (rtx *, void *);
static rtx split_insn (rtx);

/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in regclass.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

struct recog_data recog_data;

/* Contains a vector of operand_alternative structures for every operand.
   Set up by preprocess_constraints.  */
struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
int epilogue_completed;

/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile (void)
{
  volatile_ok = 0;
}

void
init_recog (void)
{
  volatile_ok = 1;
}


/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (rtx x)
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      extract_insn (make_insn_raw (x));
      constrain_operands (1);
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = alloca (noperands * sizeof (rtx));
  constraints = alloca (noperands * sizeof (char *));

  decode_asm_operands (x, operands, NULL, constraints, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
        c++;
      if (ISDIGIT ((unsigned char) c[0]) && c[1] == '\0')
        c = constraints[c[0] - '0'];

      if (! asm_operand_ok (operands[i], c))
        return 0;
    }

  return 1;
}

/* Static data for the next two routines.  */

typedef struct change_t
{
  rtx object;
  int old_code;
  rtx *loc;
  rtx old;
} change_t;

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;

/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is nonzero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */

int
validate_change (rtx object, rtx *loc, rtx new, int in_group)
{
  rtx old = *loc;

  if (old == new || rtx_equal_p (old, new))
    return 1;

  gcc_assert (in_group != 0 || num_changes == 0);

  *loc = new;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
        /* This value allows for repeated substitutions inside complex
           indexed addresses, or changes in up to 5 insns.  */
        changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
        changes_allocated *= 2;

      changes = xrealloc (changes, sizeof (change_t) * changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;

  if (object && !MEM_P (object))
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
         case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}
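
/* Editor's sketch (not part of the original source): the common
   single-change idiom for validate_change.  INSN below is a
   hypothetical recognized insn with a single SET.  */
#if 0
static void
example_single_change (rtx insn)
{
  rtx set = single_set (insn);

  /* Tentatively rewrite the source as (const_int 0).  With IN_GROUP == 0
     the change is validated immediately; on failure the old rtl is
     restored and validate_change returns 0.  */
  if (set && validate_change (insn, &SET_SRC (set), const0_rtx, 0))
    ;  /* INSN still matches some pattern with the new source.  */
}
#endif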
 

/* Function to be passed to for_each_rtx to test whether a piece of
   RTL contains any mem/v.  */
static int
volatile_mem_p (rtx *x, void *data ATTRIBUTE_UNUSED)
{
  return (MEM_P (*x) && MEM_VOLATILE_P (*x));
}

/* Same as validate_change, but doesn't support groups, and it accepts
   volatile mems if they're already present in the original insn.  */

int
validate_change_maybe_volatile (rtx object, rtx *loc, rtx new)
{
  int result;

  if (validate_change (object, loc, new, 0))
    return 1;

  if (volatile_ok
      /* If there isn't a volatile MEM, there's nothing we can do.  */
      || !for_each_rtx (&PATTERN (object), volatile_mem_p, 0)
      /* Make sure we're not adding or removing volatile MEMs.  */
      || for_each_rtx (loc, volatile_mem_p, 0)
      || for_each_rtx (&new, volatile_mem_p, 0)
      || !insn_invalid_p (object))
    return 0;

  volatile_ok = 1;

  gcc_assert (!insn_invalid_p (object));

  result = validate_change (object, loc, new, 0);

  volatile_ok = 0;

  return result;
}

/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.  */

int
insn_invalid_p (rtx insn)
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
                     (GET_CODE (pat) == SET
                      && ! reload_completed && ! reload_in_progress)
                     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;


  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
        return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1))
        return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}

/* Return number of changes made and not validated yet.  */
int
num_changes_pending (void)
{
  return num_changes;
}

/* Tentatively apply the changes numbered NUM and up.
   Return 1 if all changes are valid, zero otherwise.  */

static int
verify_changes (int num)
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is an insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = num; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
         already tested, ignore it.  */
      if (object == 0 || object == last_validated)
        continue;

      if (MEM_P (object))
        {
          if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
            break;
        }
      else if (insn_invalid_p (object))
        {
          rtx pat = PATTERN (object);

          /* Perhaps we couldn't recognize the insn because there were
             extra CLOBBERs at the end.  If so, try to re-recognize
             without the last CLOBBER (later iterations will cause each of
             them to be eliminated, in turn).  But don't do this if we
             have an ASM_OPERAND.  */
          if (GET_CODE (pat) == PARALLEL
              && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
              && asm_noperands (PATTERN (object)) < 0)
            {
              rtx newpat;

              if (XVECLEN (pat, 0) == 2)
                newpat = XVECEXP (pat, 0, 0);
              else
                {
                  int j;

                  newpat
                    = gen_rtx_PARALLEL (VOIDmode,
                                        rtvec_alloc (XVECLEN (pat, 0) - 1));
                  for (j = 0; j < XVECLEN (newpat, 0); j++)
                    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
                }

              /* Add a new change to this group to replace the pattern
                 with this new pattern.  Then consider this change
                 as having succeeded.  The change we added will
                 cause the entire call to fail if things remain invalid.

                 Note that this can lose if a later change than the one
                 we are processing specified &XVECEXP (PATTERN (object), 0, X)
                 but this shouldn't occur.  */

              validate_change (object, &PATTERN (object), newpat, 1);
              continue;
            }
          else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
            /* If this insn is a CLOBBER or USE, it is always valid, but is
               never recognized.  */
            continue;
          else
            break;
        }
      last_validated = object;
    }

  return (i == num_changes);
}

/* A group of changes has previously been issued with validate_change and
   verified with verify_changes.  Update the BB_DIRTY flags of the affected
   blocks, and clear num_changes.  */

void
confirm_change_group (void)
{
  int i;
  basic_block bb;

  for (i = 0; i < num_changes; i++)
    if (changes[i].object
        && INSN_P (changes[i].object)
        && (bb = BLOCK_FOR_INSN (changes[i].object)))
      bb->flags |= BB_DIRTY;

  num_changes = 0;
}

/* Apply a group of changes previously issued with `validate_change'.
   If all changes are valid, call confirm_change_group and return 1,
   otherwise, call cancel_changes and return 0.  */

int
apply_change_group (void)
{
  if (verify_changes (0))
    {
      confirm_change_group ();
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}


/* Return the number of changes so far in the current group.  */

int
num_validated_changes (void)
{
  return num_changes;
}

/* Retract the changes numbered NUM and up.  */

void
cancel_changes (int num)
{
  int i;

  /* Back out all the changes.  Do this in the opposite order from that
     in which they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      if (changes[i].object && !MEM_P (changes[i].object))
        INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}
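
/* Editor's sketch (not part of the original source) of the
   grouped-change protocol implemented above: queue several edits with
   IN_GROUP == 1, then commit or roll them back atomically.  INSN, A
   and B are hypothetical.  */
#if 0
static int
example_swap_operands (rtx insn, rtx *a, rtx *b)
{
  rtx tem = *a;

  /* Record both edits; nothing is validated yet.  */
  validate_change (insn, a, *b, 1);
  validate_change (insn, b, tem, 1);

  /* Re-recognize INSN once with both edits in place; on failure the
     group is cancelled and the rtl is exactly as before.  */
  return apply_change_group ();
}
#endif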
 
/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object)
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  enum machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;
  rtx new;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (REG_P (x) && REG_P (from)
          && GET_MODE (x) == GET_MODE (from)
          && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
          && rtx_equal_p (x, from)))
    {
      validate_change (object, loc, to, 1);
      return;
    }

  /* Call ourselves recursively to perform the replacements.
     We must not replace inside an already replaced expression, otherwise we
     get infinite recursion for replacements like (reg X)->(subreg (reg X))
     done by regmove, so we must special case shared ASM_OPERANDS.  */

  if (GET_CODE (x) == PARALLEL)
    {
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
        {
          if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
              && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
            {
              /* Verify that operands are really shared.  */
              gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
                          == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
                                                              (x, 0, j))));
              validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
                                      from, to, object);
            }
          else
            validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object);
        }
    }
  else
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
        else if (fmt[i] == 'E')
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
      }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* Allow the substituted expression to have a different mode.  This is
     used by regmove to change the mode of a pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */

  if (SWAPPABLE_OPERANDS_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_change (object, loc,
                       gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
                                       : swap_condition (code),
                                       GET_MODE (x), XEXP (x, 1),
                                       XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
         simplify_gen_binary to try to simplify it.
         ??? We may want to remove this later, once simplification is
         separated from this function.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          || GET_CODE (XEXP (x, 1)) == CONST_DOUBLE)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0),
                          simplify_gen_unary (NEG,
                                              GET_MODE (x), XEXP (x, 1),
                                              GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
        {
          new = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
                                    op0_mode);
          /* If any of the above failed, substitute in something that
             we know won't be recognized.  */
          if (!new)
            new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
          validate_change (object, loc, new, 1);
        }
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
                             SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new && GET_MODE (SUBREG_REG (x)) == VOIDmode)
        new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new)
        validate_change (object, loc, new, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
         to be the mode required for memory in extract operations (this isn't
         likely to be an insertion operation; if it was, nothing bad will
         happen, we might just fail in some cases).  */

      if (MEM_P (XEXP (x, 0))
          && GET_CODE (XEXP (x, 1)) == CONST_INT
          && GET_CODE (XEXP (x, 2)) == CONST_INT
          && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0))
          && !MEM_VOLATILE_P (XEXP (x, 0)))
        {
          enum machine_mode wanted_mode = VOIDmode;
          enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
          int pos = INTVAL (XEXP (x, 2));

          if (GET_CODE (x) == ZERO_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extzv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }
          else if (GET_CODE (x) == SIGN_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }

          /* If we have a narrower mode, we can do something.  */
          if (wanted_mode != VOIDmode
              && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
            {
              int offset = pos / BITS_PER_UNIT;
              rtx newmem;

              /* If the bytes and bits are counted differently, we
                 must adjust the offset.  */
              if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
                offset =
                  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
                   offset);

              pos %= GET_MODE_BITSIZE (wanted_mode);

              newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

              validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
              validate_change (object, &XEXP (x, 0), newmem, 1);
            }
        }

      break;

    default:
      break;
    }
}

/* Try replacing every occurrence of FROM in subexpression LOC of INSN
   with TO.  After all changes have been made, validate by seeing
   if INSN is still valid.  */

int
validate_replace_rtx_subexp (rtx from, rtx to, rtx insn, rtx *loc)
{
  validate_replace_rtx_1 (loc, from, to, insn);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
  return apply_change_group ();
}
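
/* Editor's sketch (not part of the original source): a typical copy
   propagation using validate_replace_rtx.  The register numbers are
   made up; INSN is a hypothetical insn mentioning pseudo 90.  */
#if 0
static int
example_propagate_copy (rtx insn)
{
  /* Rewrite every use of (reg:SI 90) in INSN as (reg:SI 91); the
     change sticks only if INSN is still recognizable afterwards.  */
  return validate_replace_rtx (gen_rtx_REG (SImode, 90),
                               gen_rtx_REG (SImode, 91), insn);
}
#endif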
 
/* Try replacing every occurrence of FROM in INSN with TO.  */

void
validate_replace_rtx_group (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
}

/* Function called by note_uses to replace used subexpressions.  */
struct validate_replace_src_data
{
  rtx from;                     /* Old RTX */
  rtx to;                       /* New RTX */
  rtx insn;                     /* Insn in which substitution is occurring.  */
};

static void
validate_replace_src_1 (rtx *x, void *data)
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn);
}

/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  */

void
validate_replace_src_group (rtx from, rtx to, rtx insn)
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
}
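
/* Editor's sketch (not part of the original source): unlike
   validate_replace_rtx, this pair only touches used (source)
   positions, so a SET_DEST that happens to equal FROM is left
   alone.  FROM, TO and INSN are hypothetical.  */
#if 0
static int
example_replace_sources (rtx from, rtx to, rtx insn)
{
  /* Queue the source-only replacements, then validate the group.  */
  validate_replace_src_group (from, to, insn);
  return apply_change_group ();
}
#endif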
 
#ifdef HAVE_cc0
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (rtx insn)
{
  rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return (INSN_P (next)
          && ! inequality_comparisons_p (PATTERN (next)));
}
#endif

/* This is used by find_single_use to locate an rtx that contains exactly one
   use of DEST, which is typically either a REG or CC0.  It returns a
   pointer to the innermost rtx expression containing DEST.  Appearances of
   DEST that are being used to totally replace it are not counted.  */

static rtx *
find_single_use_1 (rtx dest, rtx *loc)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx *result = 0;
  rtx *this_result;
  int i;
  const char *fmt;

  switch (code)
    {
    case CONST_INT:
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CLOBBER:
      return 0;

    case SET:
      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
         of a REG that occupies all of the REG, the insn uses DEST if
         it is mentioned in the destination or the source.  Otherwise, we
         just need to check the source.  */
      if (GET_CODE (SET_DEST (x)) != CC0
          && GET_CODE (SET_DEST (x)) != PC
          && !REG_P (SET_DEST (x))
          && ! (GET_CODE (SET_DEST (x)) == SUBREG
                && REG_P (SUBREG_REG (SET_DEST (x)))
                && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
                      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
                         + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
        break;

      return find_single_use_1 (dest, &SET_SRC (x));

    case MEM:
    case SUBREG:
      return find_single_use_1 (dest, &XEXP (x, 0));

    default:
      break;
    }

  /* If it wasn't one of the common cases above, check each expression and
     vector of this code.  Look for a unique usage of DEST.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          if (dest == XEXP (x, i)
              || (REG_P (dest) && REG_P (XEXP (x, i))
                  && REGNO (dest) == REGNO (XEXP (x, i))))
            this_result = loc;
          else
            this_result = find_single_use_1 (dest, &XEXP (x, i));

          if (result == 0)
            result = this_result;
          else if (this_result)
            /* Duplicate usage.  */
            return 0;
        }
      else if (fmt[i] == 'E')
        {
          int j;

          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            {
              if (XVECEXP (x, i, j) == dest
                  || (REG_P (dest)
                      && REG_P (XVECEXP (x, i, j))
                      && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
                this_result = loc;
              else
                this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));

              if (result == 0)
                result = this_result;
              else if (this_result)
                return 0;
            }
        }
    }

  return result;
}

/* See if DEST, produced in INSN, is used only a single time in the
   sequel.  If so, return a pointer to the innermost rtx expression in which
   it is used.

   If PLOC is nonzero, *PLOC is set to the insn containing the single use.

   This routine will usually return zero either before flow is called (because
   there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
   note can't be trusted).

   If DEST is cc0_rtx, we look only at the next insn.  In that case, we don't
   care about REG_DEAD notes or LOG_LINKS.

   Otherwise, we find the single use by finding an insn that has a
   LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST.  If DEST is
   only referenced once in that insn, we know that it must be the first
   and last insn referencing DEST.  */

rtx *
find_single_use (rtx dest, rtx insn, rtx *ploc)
{
  rtx next;
  rtx *result;
  rtx link;

#ifdef HAVE_cc0
  if (dest == cc0_rtx)
    {
      next = NEXT_INSN (insn);
      if (next == 0
          || (!NONJUMP_INSN_P (next) && !JUMP_P (next)))
        return 0;

      result = find_single_use_1 (dest, &PATTERN (next));
      if (result && ploc)
        *ploc = next;
      return result;
    }
#endif

  if (reload_completed || reload_in_progress || !REG_P (dest))
    return 0;

  for (next = next_nonnote_insn (insn);
       next != 0 && !LABEL_P (next);
       next = next_nonnote_insn (next))
    if (INSN_P (next) && dead_or_set_p (next, dest))
      {
        for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
          if (XEXP (link, 0) == insn)
            break;

        if (link)
          {
            result = find_single_use_1 (dest, &PATTERN (next));
            if (ploc)
              *ploc = next;
            return result;
          }
      }

  return 0;
}
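
/* Editor's sketch (not part of the original source): how a
   combine-style pass might use find_single_use.  DEST and INSN are
   hypothetical.  */
#if 0
static void
example_substitute_single_use (rtx dest, rtx insn)
{
  rtx use_insn;
  rtx *usep = find_single_use (dest, insn, &use_insn);

  /* If DEST dies in USE_INSN and *USEP is its only mention there, it
     may be safe to substitute the value computed by INSN directly
     into the use.  */
  if (usep && single_set (insn))
    validate_change (use_insn, usep, SET_SRC (single_set (insn)), 0);
}
#endif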
 
/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   For an explanation of this function's behavior for registers of
   class NO_REGS, see the comment for `register_operand'.  */

int
general_operand (rtx op, enum machine_mode mode)
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
             || mode == VOIDmode)
            && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
            && LEGITIMATE_CONSTANT_P (op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
         references to be explicit, so outlaw paradoxical SUBREGs.
         However, we must allow them after reload so that they can
         get cleaned up by cleanup_subreg_operands.  */
      if (!reload_completed && MEM_P (sub)
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
         may result in an incorrect reference.  We should simplify all valid
         subregs of MEM anyway.  But allow this after reload because we
         might be called from cleanup_subreg_operands.

         ??? This is a kludge.  */
      if (!reload_completed && SUBREG_BYTE (op) != 0
          && MEM_P (sub))
        return 0;

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    /* A register whose class is NO_REGS is not a general operand.  */
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
            || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
        return 0;

      /* Use the mem's mode, since it will be reloaded thus.  */
      if (memory_address_p (GET_MODE (op), y))
        return 1;
    }

  return 0;
}
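
/* Editor's illustration (not part of the original source): as the
   comment above says, this predicate is normally referenced from a
   machine description, e.g. in a hypothetical move pattern:

     (define_insn "*movsi_example"
       [(set (match_operand:SI 0 "general_operand" "=g")
             (match_operand:SI 1 "general_operand" "g"))]
       ...)

   The generated recognizer then calls general_operand (op, SImode)
   on each operand.  */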
 
/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (rtx op, enum machine_mode mode)
{
  return memory_address_p (mode, op);
}

/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   As a special exception, registers whose class is NO_REGS are
   not accepted by `register_operand'.  The reason for this change
   is to allow the representation of special architecture artifacts
   (such as a condition code register) without extending the rtl
   definitions.  Since registers of class NO_REGS cannot be used
   as registers in any case where register classes are examined,
   it is most consistent to keep this function from accepting them.  */

int
register_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && MEM_P (sub))
        return general_operand (op, mode);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if (REG_P (sub)
          && REGNO (sub) < FIRST_PSEUDO_REGISTER
          && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT)
        return 0;
#endif

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (REG_P (op)
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* Return 1 for a register in Pmode; ignore the tested mode.  */

int
pmode_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return register_operand (op, Pmode);
}

/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

int
scratch_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
          || (REG_P (op)
              && REGNO (op) < FIRST_PSEUDO_REGISTER));
}

/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return (CONSTANT_P (op)
          && (GET_MODE (op) == mode || mode == VOIDmode
              || GET_MODE (op) == VOIDmode)
          && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
          && LEGITIMATE_CONSTANT_P (op));
}

/* Returns 1 if OP is an operand that is a CONST_INT.  */

int
const_int_operand (rtx op, enum machine_mode mode)
{
  if (GET_CODE (op) != CONST_INT)
    return 0;

  if (mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return 1;
}

/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number.  */

int
const_double_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
          && (mode == VOIDmode || GET_MODE (op) == mode
              || GET_MODE (op) == VOIDmode));
}

/* Return 1 if OP is a general operand that is not an immediate operand.  */

int
nonimmediate_operand (rtx op, enum machine_mode mode)
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}

/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (rtx op, enum machine_mode mode)
{
  if (CONSTANT_P (op))
    {
      /* Don't accept CONST_INT or anything similar
         if the caller wants something floating.  */
      if (GET_MODE (op) == VOIDmode && mode != VOIDmode
          && GET_MODE_CLASS (mode) != MODE_INT
          && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
        return 0;

      if (GET_CODE (op) == CONST_INT
          && mode != VOIDmode
          && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
        return 0;

      return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
               || mode == VOIDmode)
              && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
              && LEGITIMATE_CONSTANT_P (op));
    }

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && MEM_P (SUBREG_REG (op)))
        return general_operand (op, mode);
      op = SUBREG_REG (op);
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (REG_P (op)
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (rtx op, enum machine_mode mode)
{
  unsigned int rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (rounded_size);
#endif

  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (rounded_size == GET_MODE_SIZE (mode))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
        return 0;
    }
  else
    {
      if (GET_CODE (op) != PRE_MODIFY
          || GET_CODE (XEXP (op, 1)) != PLUS
          || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
          || GET_CODE (XEXP (XEXP (op, 1), 1)) != CONST_INT
#ifdef STACK_GROWS_DOWNWARD
          || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
#else
          || INTVAL (XEXP (XEXP (op, 1), 1)) != (int) rounded_size
#endif
          )
        return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}
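
/* Editor's note (derived from the code above; the concrete rtl is
   illustrative, assuming a STACK_GROWS_DOWNWARD target with SImode
   Pmode): with no push rounding this accepts the plain
   STACK_PUSH_CODE form, e.g.

     (mem:SI (pre_dec:SI (reg:SI sp)))

   while a target whose PUSH_ROUNDING widens the slot must use the
   PRE_MODIFY form checked above, e.g. for a rounded size of 4

     (mem:HI (pre_modify:SI (reg:SI sp)
                            (plus:SI (reg:SI sp) (const_int -4))))  */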
 
/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
pop_operand (rtx op, enum machine_mode mode)
{
  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}

/* Return 1 if ADDR is a valid memory address for mode MODE.  */

int
memory_address_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx addr)
{
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
}
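
/* Editor's note (not part of the original source):
   GO_IF_LEGITIMATE_ADDRESS is a target macro that branches to the
   given label when ADDR is valid, which is why the function body
   above is written with a `win' label.  A minimal hypothetical
   target definition has the shape:

     #define GO_IF_LEGITIMATE_ADDRESS(MODE, X, LABEL) \
       if (REG_P (X)) goto LABEL;
*/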
 
/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (rtx op, enum machine_mode mode)
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before end of reload pass,
       because (SUBREG (MEM...)) forces reloading into a register.  */
    return MEM_P (op) && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (MEM_P (inner) && general_operand (op, mode));
}

/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (rtx op, enum machine_mode mode)
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
    {
      int offset = SUBREG_BYTE (op);
      rtx inner = SUBREG_REG (op);

      if (mode != VOIDmode && GET_MODE (op) != mode)
        return 0;

      /* The only way that we can have a general_operand as the resulting
         address is if OFFSET is zero and the address already is an operand
         or if the address is (plus Y (const_int -OFFSET)) and Y is an
         operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
              || (GET_CODE (XEXP (inner, 0)) == PLUS
                  && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
                  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
                  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  return (MEM_P (op)
          && memory_operand (op, mode)
          && general_operand (XEXP (op, 0), Pmode));
}

/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (rtx op, enum machine_mode mode)
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
          && COMPARISON_P (op));
}

/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

int
asm_noperands (rtx body)
{
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* No output operands: return number of input operands.  */
      return ASM_OPERANDS_INPUT_LENGTH (body);
    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
        /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
        return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
      else
        return -1;
    case PARALLEL:
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET
          && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
        {
          /* Multiple output operands, or 1 output plus some clobbers:
             body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
          int i;
          int n_sets;

          /* Count backwards through CLOBBERs to determine the number of
             SETs.  */
          for (i = XVECLEN (body, 0); i > 0; i--)
            {
              if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
                break;
              if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
                return -1;
            }

          /* N_SETS is now the number of output operands.  */
          n_sets = i;

          /* Verify that all the SETs we have
             came from a single original asm_operands insn
             (so that invalid combinations are blocked).  */
          for (i = 0; i < n_sets; i++)
            {
              rtx elt = XVECEXP (body, 0, i);
              if (GET_CODE (elt) != SET)
                return -1;
              if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
                return -1;
              /* If these ASM_OPERANDS rtx's came from different original
                 insns, then they aren't allowed together.  */
              if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
                  != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
                return -1;
            }
          return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
                  + n_sets);
        }
      else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
        {
          /* 0 outputs, but some clobbers:
             body is [(asm_operands ...) (clobber (reg ...))...].  */
          int i;

          /* Make sure all the other parallel things really are clobbers.  */
          for (i = XVECLEN (body, 0) - 1; i > 0; i--)
            if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
              return -1;

          return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
        }
      else
        return -1;
    default:
      return -1;
    }
}
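
/* Editor's illustration (hypothetical, not part of the original
   source): for an asm such as

     asm ("..." : "=r" (out) : "r" (in) : "cc");

   the insn body is roughly

     (parallel [(set (reg out) (asm_operands ... [(reg in)] ...))
                (clobber (reg:CC cc))])

   and asm_noperands returns 2: one SET (the output) plus one input;
   the CLOBBER is not counted.  */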
 
/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
                     const char **constraints, enum machine_mode *modes)
{
  int i;
  int noperands;
  const char *template = 0;

  if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
    {
      rtx asmop = SET_SRC (body);
      /* Single output operand: BODY is (set OUTPUT (asm_operands ....)).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;

      for (i = 1; i < noperands; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
        }

      /* The output is in the SET.
         Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
        operands[0] = SET_DEST (body);
      if (operand_locs)
        operand_locs[0] = &SET_DEST (body);
      if (constraints)
        constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
        modes[0] = GET_MODE (SET_DEST (body));
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == ASM_OPERANDS)
    {
      rtx asmop = body;
      /* No output operands: BODY is (asm_operands ....).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);

      /* The input operands are found in the 1st element vector.  */
      /* Constraints for inputs are in the 2nd element vector.  */
      for (i = 0; i < noperands; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
           && GET_CODE (XVECEXP (body, 0, 0)) == SET
           && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
    {
      rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
      int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
      int nout = 0;              /* Does not include CLOBBERs.  */

      /* At least one output, plus some CLOBBERs.  */

      /* The outputs are in the SETs.
         Their constraints are in the ASM_OPERANDS itself.  */
      for (i = 0; i < nparallel; i++)
        {
          if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
            break;              /* Past last SET */

          if (operands)
            operands[i] = SET_DEST (XVECEXP (body, 0, i));
          if (operand_locs)
            operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
          if (constraints)
            constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
          if (modes)
            modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
          nout++;
        }

      for (i = 0; i < nin; i++)
        {
          if (operand_locs)
            operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
           && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
    {
      /* No outputs, but some CLOBBERs.  */

      rtx asmop = XVECEXP (body, 0, 0);
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);

      for (i = 0; i < nin; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }

  return template;
}
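
/* Editor's sketch (mirrors check_asm_operands above, not part of the
   original source): X is a hypothetical asm body for which
   asm_noperands returned N > 0.  */
#if 0
static const char *
example_decode (rtx x, int n)
{
  rtx *ops = alloca (n * sizeof (rtx));
  const char **cons = alloca (n * sizeof (char *));

  /* Fill OPS and CONS; pass NULL for the location and mode vectors
     we do not need.  */
  return decode_asm_operands (x, ops, NULL, cons, NULL);
}
#endif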
 
/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */

int
asm_operand_ok (rtx op, const char *constraint)
{
  int result = 0;

  /* Use constrain_operands after reload.  */
  gcc_assert (!reload_completed);

  while (*constraint)
    {
      char c = *constraint;
      int len;
      switch (c)
        {
        case ',':
          constraint++;
          continue;
        case '=':
        case '+':
        case '*':
        case '%':
        case '!':
        case '#':
        case '&':
        case '?':
          break;

        case '0': case '1': case '2': case '3': case '4':
        case '5': case '6': case '7': case '8': case '9':
          /* For best results, our caller should have given us the
             proper matching constraint, but we can't actually fail
             the check if they didn't.  Indicate that results are
             inconclusive.  */
          do
            constraint++;
          while (ISDIGIT (*constraint));
          if (! result)
            result = -1;
          continue;

        case 'p':
          if (address_operand (op, VOIDmode))
            result = 1;
          break;

        case 'm':
        case 'V': /* non-offsettable */
          if (memory_operand (op, VOIDmode))
            result = 1;
          break;

        case 'o': /* offsettable */
          if (offsettable_nonstrict_memref_p (op))
            result = 1;
          break;

        case '<':
          /* ??? Before flow, auto inc/dec insns are not supposed to exist,
             excepting those that expand_call created.  Further, on some
             machines which do not have generalized auto inc/dec, an inc/dec
             is not a memory_operand.

             Match any memory and hope things are resolved after reload.  */

          if (MEM_P (op)
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_DEC
                  || GET_CODE (XEXP (op, 0)) == POST_DEC))
            result = 1;
          break;

        case '>':
          if (MEM_P (op)
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_INC
                  || GET_CODE (XEXP (op, 0)) == POST_INC))
            result = 1;
          break;

        case 'E':
        case 'F':
          if (GET_CODE (op) == CONST_DOUBLE
              || (GET_CODE (op) == CONST_VECTOR
                  && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
            result = 1;
          break;

        case 'G':
          if (GET_CODE (op) == CONST_DOUBLE
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', constraint))
            result = 1;
          break;
        case 'H':
          if (GET_CODE (op) == CONST_DOUBLE
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'H', constraint))
            result = 1;
          break;

        case 's':
          if (GET_CODE (op) == CONST_INT
              || (GET_CODE (op) == CONST_DOUBLE
                  && GET_MODE (op) == VOIDmode))
            break;
          /* Fall through.  */

        case 'i':
          if (CONSTANT_P (op) && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op)))
            result = 1;
1721
          break;
1722
 
1723
        case 'n':
1724
          if (GET_CODE (op) == CONST_INT
1725
              || (GET_CODE (op) == CONST_DOUBLE
1726
                  && GET_MODE (op) == VOIDmode))
1727
            result = 1;
1728
          break;
1729
 
1730
        case 'I':
1731
          if (GET_CODE (op) == CONST_INT
1732
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'I', constraint))
1733
            result = 1;
1734
          break;
1735
        case 'J':
1736
          if (GET_CODE (op) == CONST_INT
1737
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'J', constraint))
1738
            result = 1;
1739
          break;
1740
        case 'K':
1741
          if (GET_CODE (op) == CONST_INT
1742
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', constraint))
1743
            result = 1;
1744
          break;
1745
        case 'L':
1746
          if (GET_CODE (op) == CONST_INT
1747
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'L', constraint))
1748
            result = 1;
1749
          break;
1750
        case 'M':
1751
          if (GET_CODE (op) == CONST_INT
1752
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'M', constraint))
1753
            result = 1;
1754
          break;
1755
        case 'N':
1756
          if (GET_CODE (op) == CONST_INT
1757
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'N', constraint))
1758
            result = 1;
1759
          break;
1760
        case 'O':
1761
          if (GET_CODE (op) == CONST_INT
1762
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'O', constraint))
1763
            result = 1;
1764
          break;
1765
        case 'P':
1766
          if (GET_CODE (op) == CONST_INT
1767
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'P', constraint))
1768
            result = 1;
1769
          break;
1770
 
1771
        case 'X':
1772
          result = 1;
1773
          break;
1774
 
1775
        case 'g':
1776
          if (general_operand (op, VOIDmode))
1777
            result = 1;
1778
          break;
1779
 
1780
        default:
1781
          /* For all other letters, we first check for a register class,
1782
             otherwise it is an EXTRA_CONSTRAINT.  */
1783
          if (REG_CLASS_FROM_CONSTRAINT (c, constraint) != NO_REGS)
1784
            {
1785
            case 'r':
1786
              if (GET_MODE (op) == BLKmode)
1787
                break;
1788
              if (register_operand (op, VOIDmode))
1789
                result = 1;
1790
            }
1791
#ifdef EXTRA_CONSTRAINT_STR
1792
          else if (EXTRA_CONSTRAINT_STR (op, c, constraint))
1793
            result = 1;
1794
          else if (EXTRA_MEMORY_CONSTRAINT (c, constraint)
1795
                   /* Every memory operand can be reloaded to fit.  */
1796
                   && memory_operand (op, VOIDmode))
1797
            result = 1;
1798
          else if (EXTRA_ADDRESS_CONSTRAINT (c, constraint)
1799
                   /* Every address operand can be reloaded to fit.  */
1800
                   && address_operand (op, VOIDmode))
1801
            result = 1;
1802
#endif
1803
          break;
1804
        }
1805
      len = CONSTRAINT_LEN (c, constraint);
1806
      do
1807
        constraint++;
1808
      while (--len && *constraint);
1809
      if (len)
1810
        return 0;
1811
    }
1812
 
1813
  return result;
1814
}
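
/* Illustrative sketch (added; not part of the original source): the
   three-way return value means callers must not treat the result as a
   boolean.  The constraint string "rm" below is a hypothetical example.

     int ok = asm_operand_ok (op, "rm");
     if (ok > 0)
       ...the operand satisfies the constraint...
     else if (ok == 0)
       ...the operand is definitely invalid...
     else
       ...a matching constraint was seen; the result is inconclusive...
*/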

/* Given an rtx *P, if it is a sum containing an integer constant term,
   return the location (type rtx *) of the pointer to that constant term.
   Otherwise, return a null pointer.  */

rtx *
find_constant_term_loc (rtx *p)
{
  rtx *tem;
  enum rtx_code code = GET_CODE (*p);

  /* If *P IS such a constant term, P is its location.  */

  if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
      || code == CONST)
    return p;

  /* Otherwise, if not a sum, it has no constant term.  */

  if (GET_CODE (*p) != PLUS)
    return 0;

  /* If one of the summands is constant, return its location.  */

  if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
      && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
    return p;

  /* Otherwise, check each summand for containing a constant term.  */

  if (XEXP (*p, 0) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 0));
      if (tem != 0)
        return tem;
    }

  if (XEXP (*p, 1) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 1));
      if (tem != 0)
        return tem;
    }

  return 0;
}
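
/* Illustrative sketch (an addition to this listing): for an address such
   as (plus (reg) (const_int 4)), the returned location lets a caller
   rewrite the constant term in place and restore it afterwards, much as
   offsettable_address_p does below.

     rtx *loc = find_constant_term_loc (&addr);
     if (loc)
       {
         rtx saved = *loc;
         *loc = plus_constant (*loc, 3);
         ...test the modified address here...
         *loc = saved;
       }
*/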

/* Return 1 if OP is a memory reference
   whose address contains no side effects
   and remains valid after the addition
   of a positive integer less than the
   size of the object being referenced.

   We assume that the original address is valid and do not check it.

   This uses strict_memory_address_p as a subroutine, so
   don't use it before reload.  */

int
offsettable_memref_p (rtx op)
{
  return ((MEM_P (op))
          && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
}

/* Similar, but don't require a strictly valid mem ref:
   consider pseudo-regs valid as index or base regs.  */

int
offsettable_nonstrict_memref_p (rtx op)
{
  return ((MEM_P (op))
          && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
}

/* Return 1 if Y is a memory address which contains no side effects
   and would remain valid after the addition of a positive integer
   less than the size of that mode.

   We assume that the original address is valid and do not check it.
   We do check that it is valid for narrower modes.

   If STRICTP is nonzero, we require a strictly valid address,
   for the sake of use in reload.c.  */

int
offsettable_address_p (int strictp, enum machine_mode mode, rtx y)
{
  enum rtx_code ycode = GET_CODE (y);
  rtx z;
  rtx y1 = y;
  rtx *y2;
  int (*addressp) (enum machine_mode, rtx) =
    (strictp ? strict_memory_address_p : memory_address_p);
  unsigned int mode_sz = GET_MODE_SIZE (mode);

  if (CONSTANT_ADDRESS_P (y))
    return 1;

  /* Adjusting an offsettable address involves changing to a narrower mode.
     Make sure that's OK.  */

  if (mode_dependent_address_p (y))
    return 0;

  /* ??? How much offset does an offsettable BLKmode reference need?
     Clearly that depends on the situation in which it's being used.
     However, the current situation in which we test 0xffffffff is
     less than ideal.  Caveat user.  */
  if (mode_sz == 0)
    mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;

  /* If the expression contains a constant term,
     see if it remains valid when max possible offset is added.  */

  if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
    {
      int good;

      y1 = *y2;
      *y2 = plus_constant (*y2, mode_sz - 1);
      /* Use QImode because an odd displacement may be automatically invalid
         for any wider mode.  But it should be valid for a single byte.  */
      good = (*addressp) (QImode, y);

      /* In any case, restore old contents of memory.  */
      *y2 = y1;
      return good;
    }

  if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
    return 0;

  /* The offset added here is chosen as the maximum offset that
     any instruction could need to add when operating on something
     of the specified mode.  We assume that if Y and Y+c are
     valid addresses then so is Y+d for all 0<d<c.  adjust_address will
     go inside a LO_SUM here, so we do so as well.  */
  if (GET_CODE (y) == LO_SUM
      && mode != BLKmode
      && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
    z = gen_rtx_LO_SUM (GET_MODE (y), XEXP (y, 0),
                        plus_constant (XEXP (y, 1), mode_sz - 1));
  else
    z = plus_constant (y, mode_sz - 1);

  /* Use QImode because an odd displacement may be automatically invalid
     for any wider mode.  But it should be valid for a single byte.  */
  return (*addressp) (QImode, z);
}
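
/* Worked example (added for illustration): for an SImode reference,
   GET_MODE_SIZE is 4, so an address (plus (reg) (const_int 100)) is
   probed as (plus (reg) (const_int 103)), i.e. with the maximal
   in-object offset 4 - 1 added; if that QImode address is valid, every
   smaller displacement is assumed valid as well.  */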

/* Return 1 if ADDR is an address-expression whose effect depends
   on the mode of the memory reference it is used in.

   Autoincrement addressing is a typical example of mode-dependence
   because the amount of the increment depends on the mode.  */

int
mode_dependent_address_p (rtx addr ATTRIBUTE_UNUSED /* Maybe used in GO_IF_MODE_DEPENDENT_ADDRESS.  */)
{
  GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
  return 0;
  /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS.  */
 win: ATTRIBUTE_UNUSED_LABEL
  return 1;
}
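
/* Illustrative note (added; hypothetical target code): a target's
   GO_IF_MODE_DEPENDENT_ADDRESS typically jumps to the given label for
   auto-modify addresses, along the lines of

     if (GET_CODE (ADDR) == POST_INC || GET_CODE (ADDR) == PRE_DEC)
       goto LABEL;

   since the increment amount equals the access size and therefore
   changes with the mode.  */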

/* Like extract_insn, but save the insn extracted and don't extract again,
   when called again for the same insn, expecting that recog_data still
   contains the valid information.  This is used primarily by the gen_attr
   infrastructure, which often extracts the same insn again and again.  */
void
extract_insn_cached (rtx insn)
{
  if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
    return;
  extract_insn (insn);
  recog_data.insn = insn;
}
/* Do cached extract_insn, constrain_operands and complain about failures.
   Used by insn_attrtab.  */
void
extract_constrain_insn_cached (rtx insn)
{
  extract_insn_cached (insn);
  if (which_alternative == -1
      && !constrain_operands (reload_completed))
    fatal_insn_not_found (insn);
}
/* Do cached constrain_operands and complain about failures.  */
int
constrain_operands_cached (int strict)
{
  if (which_alternative == -1)
    return constrain_operands (strict);
  else
    return 1;
}
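
/* Illustrative sketch (not part of the original file): the attribute
   code uses the cached pair roughly as follows, for an INSN that has
   already been recognized.

     extract_constrain_insn_cached (insn);
     ...which_alternative now holds the matched alternative...
*/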

/* Analyze INSN and fill in recog_data.  */

void
extract_insn (rtx insn)
{
  int i;
  int icode;
  int noperands;
  rtx body = PATTERN (insn);

  recog_data.insn = NULL;
  recog_data.n_operands = 0;
  recog_data.n_alternatives = 0;
  recog_data.n_dups = 0;
  which_alternative = -1;

  switch (GET_CODE (body))
    {
    case USE:
    case CLOBBER:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return;

    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
        goto asm_insn;
      else
        goto normal_insn;
    case PARALLEL:
      if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
           && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
          || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
        goto asm_insn;
      else
        goto normal_insn;
    case ASM_OPERANDS:
    asm_insn:
      recog_data.n_operands = noperands = asm_noperands (body);
      if (noperands >= 0)
        {
          /* This insn is an `asm' with operands.  */

          /* expand_asm_operands makes sure there aren't too many operands.  */
          gcc_assert (noperands <= MAX_RECOG_OPERANDS);

          /* Now get the operand values and constraints out of the insn.  */
          decode_asm_operands (body, recog_data.operand,
                               recog_data.operand_loc,
                               recog_data.constraints,
                               recog_data.operand_mode);
          if (noperands > 0)
            {
              const char *p =  recog_data.constraints[0];
              recog_data.n_alternatives = 1;
              while (*p)
                recog_data.n_alternatives += (*p++ == ',');
            }
          break;
        }
      fatal_insn_not_found (insn);

    default:
    normal_insn:
      /* Ordinary insn: recognize it, get the operands via insn_extract
         and get the constraints.  */

      icode = recog_memoized (insn);
      if (icode < 0)
        fatal_insn_not_found (insn);

      recog_data.n_operands = noperands = insn_data[icode].n_operands;
      recog_data.n_alternatives = insn_data[icode].n_alternatives;
      recog_data.n_dups = insn_data[icode].n_dups;

      insn_extract (insn);

      for (i = 0; i < noperands; i++)
        {
          recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
          recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
          /* VOIDmode match_operands get their mode from the real operand.  */
          if (recog_data.operand_mode[i] == VOIDmode)
            recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
        }
    }
  for (i = 0; i < noperands; i++)
    recog_data.operand_type[i]
      = (recog_data.constraints[i][0] == '=' ? OP_OUT
         : recog_data.constraints[i][0] == '+' ? OP_INOUT
         : OP_IN);

  gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);
}
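
/* Illustrative sketch (an addition to this listing): after the call,
   the operands of INSN are available positionally through recog_data.

     extract_insn (insn);
     for (i = 0; i < recog_data.n_operands; i++)
       if (recog_data.operand_type[i] == OP_OUT)
         ...recog_data.operand[i] is written by INSN...
*/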

/* After calling extract_insn, you can use this function to extract some
   information from the constraint strings into a more usable form.
   The collected data is stored in recog_op_alt.  */
void
preprocess_constraints (void)
{
  int i;

  for (i = 0; i < recog_data.n_operands; i++)
    memset (recog_op_alt[i], 0, (recog_data.n_alternatives
                                 * sizeof (struct operand_alternative)));

  for (i = 0; i < recog_data.n_operands; i++)
    {
      int j;
      struct operand_alternative *op_alt;
      const char *p = recog_data.constraints[i];

      op_alt = recog_op_alt[i];

      for (j = 0; j < recog_data.n_alternatives; j++)
        {
          op_alt[j].cl = NO_REGS;
          op_alt[j].constraint = p;
          op_alt[j].matches = -1;
          op_alt[j].matched = -1;

          if (*p == '\0' || *p == ',')
            {
              op_alt[j].anything_ok = 1;
              continue;
            }

          for (;;)
            {
              char c = *p;
              if (c == '#')
                do
                  c = *++p;
                while (c != ',' && c != '\0');
              if (c == ',' || c == '\0')
                {
                  p++;
                  break;
                }

              switch (c)
                {
                case '=': case '+': case '*': case '%':
                case 'E': case 'F': case 'G': case 'H':
                case 's': case 'i': case 'n':
                case 'I': case 'J': case 'K': case 'L':
                case 'M': case 'N': case 'O': case 'P':
                  /* These don't say anything we care about.  */
                  break;

                case '?':
                  op_alt[j].reject += 6;
                  break;
                case '!':
                  op_alt[j].reject += 600;
                  break;
                case '&':
                  op_alt[j].earlyclobber = 1;
                  break;

                case '0': case '1': case '2': case '3': case '4':
                case '5': case '6': case '7': case '8': case '9':
                  {
                    char *end;
                    op_alt[j].matches = strtoul (p, &end, 10);
                    recog_op_alt[op_alt[j].matches][j].matched = i;
                    p = end;
                  }
                  continue;

                case 'm':
                  op_alt[j].memory_ok = 1;
                  break;
                case '<':
                  op_alt[j].decmem_ok = 1;
                  break;
                case '>':
                  op_alt[j].incmem_ok = 1;
                  break;
                case 'V':
                  op_alt[j].nonoffmem_ok = 1;
                  break;
                case 'o':
                  op_alt[j].offmem_ok = 1;
                  break;
                case 'X':
                  op_alt[j].anything_ok = 1;
                  break;

                case 'p':
                  op_alt[j].is_address = 1;
                  op_alt[j].cl = reg_class_subunion[(int) op_alt[j].cl]
                    [(int) MODE_BASE_REG_CLASS (VOIDmode)];
                  break;

                case 'g':
                case 'r':
                  op_alt[j].cl =
                   reg_class_subunion[(int) op_alt[j].cl][(int) GENERAL_REGS];
                  break;

                default:
                  if (EXTRA_MEMORY_CONSTRAINT (c, p))
                    {
                      op_alt[j].memory_ok = 1;
                      break;
                    }
                  if (EXTRA_ADDRESS_CONSTRAINT (c, p))
                    {
                      op_alt[j].is_address = 1;
                      op_alt[j].cl
                        = (reg_class_subunion
                           [(int) op_alt[j].cl]
                           [(int) MODE_BASE_REG_CLASS (VOIDmode)]);
                      break;
                    }

                  op_alt[j].cl
                    = (reg_class_subunion
                       [(int) op_alt[j].cl]
                       [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c, p)]);
                  break;
                }
              p += CONSTRAINT_LEN (c, p);
            }
        }
    }
}
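
/* Illustrative sketch (added; not original): a pass can consult the
   preprocessed table instead of re-parsing the constraint strings.
   Assuming extract_insn was already called on the insn of interest:

     preprocess_constraints ();
     if (recog_op_alt[0][which_alternative].memory_ok)
       ...operand 0 may be a memory reference in this alternative...
*/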

/* Check the operands of an insn against the insn's operand constraints
   and return 1 if they are valid.
   The information about the insn's operands, constraints, operand modes
   etc. is obtained from the global variables set up by extract_insn.

   WHICH_ALTERNATIVE is set to a number which indicates which
   alternative of constraints was matched: 0 for the first alternative,
   1 for the next, etc.

   In addition, when two operands are required to match
   and it happens that the output operand is (reg) while the
   input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
   make the output operand look like the input.
   This is because the output operand is the one the template will print.

   This is used in final, just before printing the assembler code and by
   the routines that determine an insn's attribute.

   If STRICT is a positive nonzero value, it means that we have been
   called after reload has been completed.  In that case, we must
   do all checks strictly.  If it is zero, it means that we have been called
   before reload has completed.  In that case, we first try to see if we can
   find an alternative that matches strictly.  If not, we try again, this
   time assuming that reload will fix up the insn.  This provides a "best
   guess" for the alternative and is used to compute attributes of insns prior
   to reload.  A negative value of STRICT is used for this internal call.  */

struct funny_match
{
  int this, other;
};

int
constrain_operands (int strict)
{
  const char *constraints[MAX_RECOG_OPERANDS];
  int matching_operands[MAX_RECOG_OPERANDS];
  int earlyclobber[MAX_RECOG_OPERANDS];
  int c;

  struct funny_match funny_match[MAX_RECOG_OPERANDS];
  int funny_match_index;

  which_alternative = 0;
  if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
    return 1;

  for (c = 0; c < recog_data.n_operands; c++)
    {
      constraints[c] = recog_data.constraints[c];
      matching_operands[c] = -1;
    }

  do
    {
      int seen_earlyclobber_at = -1;
      int opno;
      int lose = 0;
      funny_match_index = 0;

      for (opno = 0; opno < recog_data.n_operands; opno++)
        {
          rtx op = recog_data.operand[opno];
          enum machine_mode mode = GET_MODE (op);
          const char *p = constraints[opno];
          int offset = 0;
          int win = 0;
          int val;
          int len;

          earlyclobber[opno] = 0;

#ifndef KEEP_UNARY_OPERATORS_AT_CONSTRAINT_CHECKING
          /* This macro and the code within is slated for removal in
             4.2, hence not documented further than in this comment.
             It only makes a difference if both an insn operand
             predicate is absent or allows unary operators and its
             constraints are present.  See gcc-patches mailing list
             thread starting at
             <URL:http://gcc.gnu.org/ml/gcc-patches/2005-10/msg00940.html>.  */

          /* A unary operator may be accepted by the predicate, but it
             is irrelevant for matching constraints.  */
          if (UNARY_P (op))
            op = XEXP (op, 0);
#endif

          if (GET_CODE (op) == SUBREG)
            {
              if (REG_P (SUBREG_REG (op))
                  && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
                offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
                                              GET_MODE (SUBREG_REG (op)),
                                              SUBREG_BYTE (op),
                                              GET_MODE (op));
              op = SUBREG_REG (op);
            }

          /* An empty constraint or empty alternative
             allows anything which matched the pattern.  */
          if (*p == 0 || *p == ',')
            win = 1;

          do
            switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
              {
              case '\0':
                len = 0;
                break;
              case ',':
                c = '\0';
                break;

              case '?':  case '!': case '*':  case '%':
              case '=':  case '+':
                break;

              case '#':
                /* Ignore rest of this alternative as far as
                   constraint checking is concerned.  */
                do
                  p++;
                while (*p && *p != ',');
                len = 0;
                break;

              case '&':
                earlyclobber[opno] = 1;
                if (seen_earlyclobber_at < 0)
                  seen_earlyclobber_at = opno;
                break;

              case '0':  case '1':  case '2':  case '3':  case '4':
              case '5':  case '6':  case '7':  case '8':  case '9':
                {
                  /* This operand must be the same as a previous one.
                     This kind of constraint is used for instructions such
                     as add when they take only two operands.

                     Note that the lower-numbered operand is passed first.

                     If we are not testing strictly, assume that this
                     constraint will be satisfied.  */

                  char *end;
                  int match;

                  match = strtoul (p, &end, 10);
                  p = end;

                  if (strict < 0)
                    val = 1;
                  else
                    {
                      rtx op1 = recog_data.operand[match];
                      rtx op2 = recog_data.operand[opno];

#ifndef KEEP_UNARY_OPERATORS_AT_CONSTRAINT_CHECKING
                      /* See comment at similar #ifndef above.  */

                      /* A unary operator may be accepted by the predicate,
                         but it is irrelevant for matching constraints.  */
                      if (UNARY_P (op1))
                        op1 = XEXP (op1, 0);
                      if (UNARY_P (op2))
                        op2 = XEXP (op2, 0);
#endif

                      val = operands_match_p (op1, op2);
                    }

                  matching_operands[opno] = match;
                  matching_operands[match] = opno;

                  if (val != 0)
                    win = 1;

                  /* If output is *x and input is *--x, arrange later
                     to change the output to *--x as well, since the
                     output op is the one that will be printed.  */
                  if (val == 2 && strict > 0)
                    {
                      funny_match[funny_match_index].this = opno;
                      funny_match[funny_match_index++].other = match;
                    }
                }
                len = 0;
                break;

              case 'p':
                /* p is used for address_operands.  When we are called by
                   gen_reload, no one will have checked that the address is
                   strictly valid, i.e., that all pseudos requiring hard regs
                   have gotten them.  */
                if (strict <= 0
                    || (strict_memory_address_p (recog_data.operand_mode[opno],
                                                 op)))
                  win = 1;
                break;

                /* No need to check general_operand again;
                   it was done in insn-recog.c.  Well, except that reload
                   doesn't check the validity of its replacements, but
                   that should only matter when there's a bug.  */
              case 'g':
                /* Anything goes unless it is a REG and really has a hard reg
                   but the hard reg is not in the class GENERAL_REGS.  */
                if (REG_P (op))
                  {
                    if (strict < 0
                        || GENERAL_REGS == ALL_REGS
                        || (reload_in_progress
                            && REGNO (op) >= FIRST_PSEUDO_REGISTER)
                        || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
                      win = 1;
                  }
                else if (strict < 0 || general_operand (op, mode))
                  win = 1;
                break;

              case 'X':
                /* This is used for a MATCH_SCRATCH in the cases when
                   we don't actually need anything.  So anything goes
                   any time.  */
                win = 1;
                break;

              case 'm':
                /* Memory operands must be valid, to the extent
                   required by STRICT.  */
                if (MEM_P (op))
                  {
                    if (strict > 0
                        && !strict_memory_address_p (GET_MODE (op),
                                                     XEXP (op, 0)))
                      break;
                    if (strict == 0
                        && !memory_address_p (GET_MODE (op), XEXP (op, 0)))
                      break;
                    win = 1;
                  }
                /* Before reload, accept what reload can turn into mem.  */
                else if (strict < 0 && CONSTANT_P (op))
                  win = 1;
                /* During reload, accept a pseudo  */
                else if (reload_in_progress && REG_P (op)
                         && REGNO (op) >= FIRST_PSEUDO_REGISTER)
                  win = 1;
                break;

              case '<':
                if (MEM_P (op)
                    && (GET_CODE (XEXP (op, 0)) == PRE_DEC
                        || GET_CODE (XEXP (op, 0)) == POST_DEC))
                  win = 1;
                break;

              case '>':
                if (MEM_P (op)
                    && (GET_CODE (XEXP (op, 0)) == PRE_INC
                        || GET_CODE (XEXP (op, 0)) == POST_INC))
                  win = 1;
                break;

              case 'E':
              case 'F':
                if (GET_CODE (op) == CONST_DOUBLE
                    || (GET_CODE (op) == CONST_VECTOR
                        && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
                  win = 1;
                break;

              case 'G':
              case 'H':
                if (GET_CODE (op) == CONST_DOUBLE
                    && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, p))
                  win = 1;
                break;

              case 's':
                if (GET_CODE (op) == CONST_INT
                    || (GET_CODE (op) == CONST_DOUBLE
                        && GET_MODE (op) == VOIDmode))
                  break;
              case 'i':
                if (CONSTANT_P (op))
                  win = 1;
                break;

              case 'n':
                if (GET_CODE (op) == CONST_INT
                    || (GET_CODE (op) == CONST_DOUBLE
                        && GET_MODE (op) == VOIDmode))
                  win = 1;
                break;

              case 'I':
              case 'J':
              case 'K':
              case 'L':
              case 'M':
              case 'N':
              case 'O':
              case 'P':
                if (GET_CODE (op) == CONST_INT
                    && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, p))
                  win = 1;
                break;

              case 'V':
                if (MEM_P (op)
                    && ((strict > 0 && ! offsettable_memref_p (op))
                        || (strict < 0
                            && !(CONSTANT_P (op) || MEM_P (op)))
                        || (reload_in_progress
                            && !(REG_P (op)
                                 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
                  win = 1;
                break;

              case 'o':
                if ((strict > 0 && offsettable_memref_p (op))
                    || (strict == 0 && offsettable_nonstrict_memref_p (op))
                    /* Before reload, accept what reload can handle.  */
                    || (strict < 0
                        && (CONSTANT_P (op) || MEM_P (op)))
                    /* During reload, accept a pseudo  */
                    || (reload_in_progress && REG_P (op)
                        && REGNO (op) >= FIRST_PSEUDO_REGISTER))
                  win = 1;
                break;

              default:
                {
                  enum reg_class cl;

                  cl = (c == 'r'
                           ? GENERAL_REGS : REG_CLASS_FROM_CONSTRAINT (c, p));
                  if (cl != NO_REGS)
                    {
                      if (strict < 0
                          || (strict == 0
                              && REG_P (op)
                              && REGNO (op) >= FIRST_PSEUDO_REGISTER)
                          || (strict == 0 && GET_CODE (op) == SCRATCH)
                          || (REG_P (op)
                              && reg_fits_class_p (op, cl, offset, mode)))
                        win = 1;
                    }
#ifdef EXTRA_CONSTRAINT_STR
                  else if (EXTRA_CONSTRAINT_STR (op, c, p))
                    win = 1;

                  else if (EXTRA_MEMORY_CONSTRAINT (c, p)
                           /* Every memory operand can be reloaded to fit.  */
                           && ((strict < 0 && MEM_P (op))
                               /* Before reload, accept what reload can turn
                                  into mem.  */
                               || (strict < 0 && CONSTANT_P (op))
                               /* During reload, accept a pseudo  */
                               || (reload_in_progress && REG_P (op)
                                   && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
                    win = 1;
                  else if (EXTRA_ADDRESS_CONSTRAINT (c, p)
                           /* Every address operand can be reloaded to fit.  */
                           && strict < 0)
                    win = 1;
#endif
                  break;
                }
              }
          while (p += len, c);

          constraints[opno] = p;
          /* If this operand did not win somehow,
             this alternative loses.  */
          if (! win)
            lose = 1;
        }
      /* This alternative won; the operands are ok.
         Change whichever operands this alternative says to change.  */
      if (! lose)
        {
          int opno, eopno;

          /* See if any earlyclobber operand conflicts with some other
             operand.  */

          if (strict > 0  && seen_earlyclobber_at >= 0)
            for (eopno = seen_earlyclobber_at;
                 eopno < recog_data.n_operands;
                 eopno++)
              /* Ignore earlyclobber operands now in memory,
                 because we would often report failure when we have
                 two memory operands, one of which was formerly a REG.  */
              if (earlyclobber[eopno]
                  && REG_P (recog_data.operand[eopno]))
                for (opno = 0; opno < recog_data.n_operands; opno++)
                  if ((MEM_P (recog_data.operand[opno])
                       || recog_data.operand_type[opno] != OP_OUT)
                      && opno != eopno
                      /* Ignore things like match_operator operands.  */
                      && *recog_data.constraints[opno] != 0
                      && ! (matching_operands[opno] == eopno
                            && operands_match_p (recog_data.operand[opno],
                                                 recog_data.operand[eopno]))
                      && ! safe_from_earlyclobber (recog_data.operand[opno],
                                                   recog_data.operand[eopno]))
                    lose = 1;

          if (! lose)
            {
              while (--funny_match_index >= 0)
                {
                  recog_data.operand[funny_match[funny_match_index].other]
                    = recog_data.operand[funny_match[funny_match_index].this];
                }

              return 1;
            }
        }

      which_alternative++;
    }
  while (which_alternative < recog_data.n_alternatives);

  which_alternative = -1;
  /* If we are about to reject this, but we are not to test strictly,
     try a very loose test.  Only return failure if it fails also.  */
  if (strict == 0)
    return constrain_operands (-1);
  else
    return 0;
}
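
/* Illustrative sketch (an addition to this listing): the STRICT regime
   is normally selected by the phase of compilation, exactly as
   extract_constrain_insn_cached does above:

     extract_insn (insn);
     if (!constrain_operands (reload_completed))
       fatal_insn_not_found (insn);

   i.e. a loose check before reload (strict == 0, with the internal -1
   retry) and a strict one afterwards (strict == 1).  */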

/* Return 1 iff OPERAND (assumed to be a REG rtx)
   is a hard reg in class CLASS when its regno is offset by OFFSET
   and changed to mode MODE.
   If REG occupies multiple hard regs, all of them must be in CLASS.  */

int
reg_fits_class_p (rtx operand, enum reg_class cl, int offset,
                  enum machine_mode mode)
{
  int regno = REGNO (operand);
  if (regno < FIRST_PSEUDO_REGISTER
      && TEST_HARD_REG_BIT (reg_class_contents[(int) cl],
                            regno + offset))
    {
      int sr;
      regno += offset;
      for (sr = hard_regno_nregs[regno][mode] - 1;
           sr > 0; sr--)
        if (! TEST_HARD_REG_BIT (reg_class_contents[(int) cl],
                                 regno + sr))
          break;
      return sr == 0;
    }

  return 0;
}
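
/* Worked example (added for illustration): on a hypothetical target
   where DImode needs two word-sized hard registers, testing whether
   (reg:DI 8) fits class CL with OFFSET 0 checks hard regs 8 and 9
   against reg_class_contents[CL]; if reg 9 is outside the class, the
   result is 0.  */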

/* Split single instruction.  Helper function for split_all_insns and
   split_all_insns_noflow.  Return last insn in the sequence if successful,
   or NULL if unsuccessful.  */

static rtx
split_insn (rtx insn)
{
  /* Split insns here to get max fine-grain parallelism.  */
  rtx first = PREV_INSN (insn);
  rtx last = try_split (PATTERN (insn), insn, 1);

  if (last == insn)
    return NULL_RTX;

  /* try_split returns the NOTE that INSN became.  */
  SET_INSN_DELETED (insn);

  /* ??? Coddle to md files that generate subregs in post-reload
     splitters instead of computing the proper hard register.  */
  if (reload_completed && first != last)
    {
      first = NEXT_INSN (first);
      for (;;)
        {
          if (INSN_P (first))
            cleanup_subreg_operands (first);
          if (first == last)
            break;
          first = NEXT_INSN (first);
        }
    }
  return last;
}
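
/* Illustrative sketch (not part of the original source): callers learn
   whether anything happened purely from the return value.

     rtx last = split_insn (insn);
     if (last != NULL_RTX)
       ...INSN was replaced by a sequence ending at LAST...
*/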

/* Split all insns in the function.  If UPD_LIFE, update life info after.  */

void
split_all_insns (int upd_life)
{
  sbitmap blocks;
  bool changed;
  basic_block bb;

  blocks = sbitmap_alloc (last_basic_block);
  sbitmap_zero (blocks);
  changed = false;

  FOR_EACH_BB_REVERSE (bb)
    {
      rtx insn, next;
      bool finish = false;

      for (insn = BB_HEAD (bb); !finish ; insn = next)
        {
          /* Can't use `next_real_insn' because that might go across
             CODE_LABELS and short-out basic blocks.  */
          next = NEXT_INSN (insn);
          finish = (insn == BB_END (bb));
          if (INSN_P (insn))
            {
              rtx set = single_set (insn);

              /* Don't split no-op move insns.  These should silently
                 disappear later in final.  Splitting such insns would
                 break the code that handles REG_NO_CONFLICT blocks.  */
              if (set && set_noop_p (set))
                {
                  /* Nops get in the way while scheduling, so delete them
                     now if register allocation has already been done.  It
                     is too risky to try to do this before register
                     allocation, and there are unlikely to be very many
                     nops then anyways.  */
                  if (reload_completed)
                    {
                      /* If the no-op set has a REG_UNUSED note, we need
                         to update liveness information.  */
                      if (find_reg_note (insn, REG_UNUSED, NULL_RTX))
                        {
                          SET_BIT (blocks, bb->index);
                          changed = true;
                        }
                      /* ??? Is life info affected by deleting edges?  */
                      delete_insn_and_edges (insn);
                    }
                }
              else
                {
                  rtx last = split_insn (insn);
                  if (last)
                    {
                      /* The split sequence may include a barrier, but the
                         BB boundary we are interested in will be set to
                         the previous one.  */

                      while (BARRIER_P (last))
                        last = PREV_INSN (last);
                      SET_BIT (blocks, bb->index);
                      changed = true;
                    }
                }
            }
        }
    }

  if (changed)
    {
      int old_last_basic_block = last_basic_block;

      find_many_sub_basic_blocks (blocks);

      if (old_last_basic_block != last_basic_block && upd_life)
        blocks = sbitmap_resize (blocks, last_basic_block, 1);
    }

  if (changed && upd_life)
    update_life_info (blocks, UPDATE_LIFE_GLOBAL_RM_NOTES,
                      PROP_DEATH_NOTES);

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  sbitmap_free (blocks);
}

/* Same as split_all_insns, but do not expect CFG to be available.
   Used by machine dependent reorg passes.  */

void
split_all_insns_noflow (void)
{
  rtx next, insn;

  for (insn = get_insns (); insn; insn = next)
    {
      next = NEXT_INSN (insn);
      if (INSN_P (insn))
        {
          /* Don't split no-op move insns.  These should silently
             disappear later in final.  Splitting such insns would
             break the code that handles REG_NO_CONFLICT blocks.  */
          rtx set = single_set (insn);
          if (set && set_noop_p (set))
            {
              /* Nops get in the way while scheduling, so delete them
                 now if register allocation has already been done.  It
                 is too risky to try to do this before register
                 allocation, and there are unlikely to be very many
                 nops then anyways.

                 ??? Should we use delete_insn when the CFG isn't valid?  */
              if (reload_completed)
                delete_insn_and_edges (insn);
            }
          else
            split_insn (insn);
        }
    }
}

#ifdef HAVE_peephole2
struct peep2_insn_data
{
  rtx insn;
  regset live_before;
};

static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
static int peep2_current;
/* The number of instructions available to match a peep2.  */
int peep2_current_count;

/* A non-insn marker indicating the last insn of the block.
   The live_before regset for this element is correct, indicating
   global_live_at_end for the block.  */
#define PEEP2_EOB       pc_rtx

/* Return the Nth non-note insn after `current', or return NULL_RTX if it
   does not exist.  Used by the recognizer to find the next insn to match
   in a multi-insn pattern.  */

rtx
peep2_next_insn (int n)
{
  gcc_assert (n <= peep2_current_count);

  n += peep2_current;
  if (n >= MAX_INSNS_PER_PEEP2 + 1)
    n -= MAX_INSNS_PER_PEEP2 + 1;

  return peep2_insn_data[n].insn;
}

/* Return true if REGNO is dead before the Nth non-note insn
   after `current'.  */

int
peep2_regno_dead_p (int ofs, int regno)
{
  gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);

  ofs += peep2_current;
  if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
    ofs -= MAX_INSNS_PER_PEEP2 + 1;

  gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);

  return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
}

/* Similarly for a REG.  */

int
peep2_reg_dead_p (int ofs, rtx reg)
{
  int regno, n;

  gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);

  ofs += peep2_current;
  if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
    ofs -= MAX_INSNS_PER_PEEP2 + 1;

  gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);

  regno = REGNO (reg);
  n = hard_regno_nregs[regno][GET_MODE (reg)];
  while (--n >= 0)
    if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
      return 0;
  return 1;
}
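
/* Illustrative sketch (added; hypothetical usage): these predicates are
   intended for the C condition of a define_peephole2 in a machine
   description, e.g.

     peep2_reg_dead_p (1, operands[0])

   which asks, roughly, whether operands[0] is dead once the first
   matched insn has executed.  */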

/* Try to find a hard register of mode MODE, matching the register class in
   CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
   remains available until the end of LAST_INSN.  LAST_INSN may be NULL_RTX,
   in which case the only condition is that the register must be available
   before CURRENT_INSN.
   Registers that already have bits set in REG_SET will not be considered.

   If an appropriate register is available, it will be returned and the
   corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
   returned.  */

rtx
peep2_find_free_register (int from, int to, const char *class_str,
                          enum machine_mode mode, HARD_REG_SET *reg_set)
{
  static int search_ofs;
  enum reg_class cl;
  HARD_REG_SET live;
  int i;

  gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
  gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);

  from += peep2_current;
  if (from >= MAX_INSNS_PER_PEEP2 + 1)
    from -= MAX_INSNS_PER_PEEP2 + 1;
  to += peep2_current;
  if (to >= MAX_INSNS_PER_PEEP2 + 1)
    to -= MAX_INSNS_PER_PEEP2 + 1;

  gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
  REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);

  while (from != to)
    {
      HARD_REG_SET this_live;

      if (++from >= MAX_INSNS_PER_PEEP2 + 1)
        from = 0;
      gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
      REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
      IOR_HARD_REG_SET (live, this_live);
    }

  cl = (class_str[0] == 'r' ? GENERAL_REGS
           : REG_CLASS_FROM_CONSTRAINT (class_str[0], class_str));

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int raw_regno, regno, success, j;

      /* Distribute the free registers as much as possible.  */
      raw_regno = search_ofs + i;
      if (raw_regno >= FIRST_PSEUDO_REGISTER)
        raw_regno -= FIRST_PSEUDO_REGISTER;
#ifdef REG_ALLOC_ORDER
      regno = reg_alloc_order[raw_regno];
#else
      regno = raw_regno;
#endif

      /* Don't allocate fixed registers.  */
      if (fixed_regs[regno])
        continue;
      /* Make sure the register is of the right class.  */
      if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno))
        continue;
      /* And can support the mode we need.  */
      if (! HARD_REGNO_MODE_OK (regno, mode))
        continue;
      /* And that we don't create an extra save/restore.  */
      if (! call_used_regs[regno] && ! regs_ever_live[regno])
        continue;
      /* And we don't clobber traceback for noreturn functions.  */
      if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
          && (! reload_completed || frame_pointer_needed))
        continue;

      success = 1;
      for (j = hard_regno_nregs[regno][mode] - 1; j >= 0; j--)
        {
          if (TEST_HARD_REG_BIT (*reg_set, regno + j)
              || TEST_HARD_REG_BIT (live, regno + j))
            {
              success = 0;
              break;
            }
        }
      if (success)
        {
          for (j = hard_regno_nregs[regno][mode] - 1; j >= 0; j--)
            SET_HARD_REG_BIT (*reg_set, regno + j);

          /* Start the next search with the next register.  */
          if (++raw_regno >= FIRST_PSEUDO_REGISTER)
            raw_regno = 0;
          search_ofs = raw_regno;

          return gen_rtx_REG (mode, regno);
        }
    }

  search_ofs = 0;
  return NULL_RTX;
}
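
/* Illustrative sketch (an addition to this listing; the variable names
   are hypothetical): in target peephole2 code the function is used
   along these lines.

     HARD_REG_SET used;
     rtx scratch;
     CLEAR_HARD_REG_SET (used);
     scratch = peep2_find_free_register (0, 1, "r", SImode, &used);
     if (scratch == NULL_RTX)
       ...no free GENERAL_REGS register across the first two insns...
*/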

/* Perform the peephole2 optimization pass.  */

void
peephole2_optimize (FILE *dump_file ATTRIBUTE_UNUSED)
{
  rtx insn, prev;
  regset live;
  int i;
  basic_block bb;
#ifdef HAVE_conditional_execution
  sbitmap blocks;
  bool changed;
#endif
  bool do_cleanup_cfg = false;
  bool do_global_life_update = false;
  bool do_rebuild_jump_labels = false;

  /* Initialize the regsets we're going to use.  */
  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
    peep2_insn_data[i].live_before = ALLOC_REG_SET (&reg_obstack);
  live = ALLOC_REG_SET (&reg_obstack);

#ifdef HAVE_conditional_execution
  blocks = sbitmap_alloc (last_basic_block);
  sbitmap_zero (blocks);
  changed = false;
#else
  count_or_remove_death_notes (NULL, 1);
#endif

  FOR_EACH_BB_REVERSE (bb)
    {
      struct propagate_block_info *pbi;
      reg_set_iterator rsi;
      unsigned int j;

      /* Indicate that all slots except the last hold invalid data.  */
3083
      for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
        peep2_insn_data[i].insn = NULL_RTX;
      peep2_current_count = 0;

      /* Indicate that the last slot contains live_after data.  */
      peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
      peep2_current = MAX_INSNS_PER_PEEP2;

      /* Start up propagation.  */
      COPY_REG_SET (live, bb->il.rtl->global_live_at_end);
      COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);

#ifdef HAVE_conditional_execution
      pbi = init_propagate_block_info (bb, live, NULL, NULL, 0);
#else
      pbi = init_propagate_block_info (bb, live, NULL, NULL, PROP_DEATH_NOTES);
#endif

      for (insn = BB_END (bb); ; insn = prev)
        {
          prev = PREV_INSN (insn);
          if (INSN_P (insn))
            {
              rtx try, before_try, x;
              int match_len;
              rtx note;
              bool was_call = false;

              /* Record this insn.  */
              if (--peep2_current < 0)
                peep2_current = MAX_INSNS_PER_PEEP2;
              if (peep2_current_count < MAX_INSNS_PER_PEEP2
                  && peep2_insn_data[peep2_current].insn == NULL_RTX)
                peep2_current_count++;
              peep2_insn_data[peep2_current].insn = insn;
              propagate_one_insn (pbi, insn);
              COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);

              if (RTX_FRAME_RELATED_P (insn))
                {
                  /* If an insn has RTX_FRAME_RELATED_P set, peephole
                     substitution would lose the
                     REG_FRAME_RELATED_EXPR that is attached.  */
                  peep2_current_count = 0;
                  try = NULL;
                }
              else
                /* Match the peephole.  */
                try = peephole2_insns (PATTERN (insn), insn, &match_len);

              if (try != NULL)
                {
                  /* If we are splitting a CALL_INSN, look for the CALL_INSN
                     in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
                     cfg-related call notes.  */
                  for (i = 0; i <= match_len; ++i)
                    {
                      int j;
                      rtx old_insn, new_insn, note;

                      j = i + peep2_current;
                      if (j >= MAX_INSNS_PER_PEEP2 + 1)
                        j -= MAX_INSNS_PER_PEEP2 + 1;
                      old_insn = peep2_insn_data[j].insn;
                      if (!CALL_P (old_insn))
                        continue;
                      was_call = true;

                      new_insn = try;
                      while (new_insn != NULL_RTX)
                        {
                          if (CALL_P (new_insn))
                            break;
                          new_insn = NEXT_INSN (new_insn);
                        }

                      gcc_assert (new_insn != NULL_RTX);

                      CALL_INSN_FUNCTION_USAGE (new_insn)
                        = CALL_INSN_FUNCTION_USAGE (old_insn);

                      for (note = REG_NOTES (old_insn);
                           note;
                           note = XEXP (note, 1))
                        switch (REG_NOTE_KIND (note))
                          {
                          case REG_NORETURN:
                          case REG_SETJMP:
                            REG_NOTES (new_insn)
                              = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
                                                   XEXP (note, 0),
                                                   REG_NOTES (new_insn));
                          default:
                            /* Discard all other reg notes.  */
                            break;
                          }

                      /* Croak if there is another call in the sequence.  */
                      while (++i <= match_len)
                        {
                          j = i + peep2_current;
                          if (j >= MAX_INSNS_PER_PEEP2 + 1)
                            j -= MAX_INSNS_PER_PEEP2 + 1;
                          old_insn = peep2_insn_data[j].insn;
                          gcc_assert (!CALL_P (old_insn));
                        }
                      break;
                    }

                  i = match_len + peep2_current;
                  if (i >= MAX_INSNS_PER_PEEP2 + 1)
                    i -= MAX_INSNS_PER_PEEP2 + 1;

                  note = find_reg_note (peep2_insn_data[i].insn,
                                        REG_EH_REGION, NULL_RTX);

                  /* Replace the old sequence with the new.  */
                  try = emit_insn_after_setloc (try, peep2_insn_data[i].insn,
                                                INSN_LOCATOR (peep2_insn_data[i].insn));
                  before_try = PREV_INSN (insn);
                  delete_insn_chain (insn, peep2_insn_data[i].insn);

                  /* Re-insert the EH_REGION notes.  */
                  if (note || (was_call && nonlocal_goto_handler_labels))
                    {
                      edge eh_edge;
                      edge_iterator ei;

                      FOR_EACH_EDGE (eh_edge, ei, bb->succs)
                        if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
                          break;

                      for (x = try; x != before_try; x = PREV_INSN (x))
                        if (CALL_P (x)
                            || (flag_non_call_exceptions
                                && may_trap_p (PATTERN (x))
                                && !find_reg_note (x, REG_EH_REGION, NULL)))
                          {
                            if (note)
                              REG_NOTES (x)
                                = gen_rtx_EXPR_LIST (REG_EH_REGION,
                                                     XEXP (note, 0),
                                                     REG_NOTES (x));

                            if (x != BB_END (bb) && eh_edge)
                              {
                                edge nfte, nehe;
                                int flags;

                                nfte = split_block (bb, x);
                                flags = (eh_edge->flags
                                         & (EDGE_EH | EDGE_ABNORMAL));
                                if (CALL_P (x))
                                  flags |= EDGE_ABNORMAL_CALL;
                                nehe = make_edge (nfte->src, eh_edge->dest,
                                                  flags);

                                nehe->probability = eh_edge->probability;
                                nfte->probability
                                  = REG_BR_PROB_BASE - nehe->probability;

                                do_cleanup_cfg |= purge_dead_edges (nfte->dest);
#ifdef HAVE_conditional_execution
                                SET_BIT (blocks, nfte->dest->index);
                                changed = true;
#endif
                                bb = nfte->src;
                                eh_edge = nehe;
                              }
                          }

                      /* It is possible that we converted a trapping
                         insn into a non-trapping one.  Zap any dummy
                         outgoing edges.  */
                      do_cleanup_cfg |= purge_dead_edges (bb);
                    }

#ifdef HAVE_conditional_execution
                  /* With conditional execution, we cannot back up the
                     live information so easily, since the conditional
                     death data structures are not so self-contained.
                     So record that we've made a modification to this
                     block and update life information at the end.  */
                  SET_BIT (blocks, bb->index);
                  changed = true;

                  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
                    peep2_insn_data[i].insn = NULL_RTX;
                  peep2_insn_data[peep2_current].insn = PEEP2_EOB;
                  peep2_current_count = 0;
#else
                  /* Back up lifetime information past the end of the
                     newly created sequence.  */
                  if (++i >= MAX_INSNS_PER_PEEP2 + 1)
                    i = 0;
                  COPY_REG_SET (live, peep2_insn_data[i].live_before);

                  /* Update life information for the new sequence.  */
                  x = try;
                  do
                    {
                      if (INSN_P (x))
                        {
                          if (--i < 0)
                            i = MAX_INSNS_PER_PEEP2;
                          if (peep2_current_count < MAX_INSNS_PER_PEEP2
                              && peep2_insn_data[i].insn == NULL_RTX)
                            peep2_current_count++;
                          peep2_insn_data[i].insn = x;
                          propagate_one_insn (pbi, x);
                          COPY_REG_SET (peep2_insn_data[i].live_before, live);
                        }
                      x = PREV_INSN (x);
                    }
                  while (x != prev);

                  /* ??? Should verify that LIVE now matches what we
                     had before the new sequence.  */

                  peep2_current = i;
#endif

                  /* If we generated a jump instruction, it won't have
                     JUMP_LABEL set.  Recompute after we're done.  */
                  for (x = try; x != before_try; x = PREV_INSN (x))
                    if (JUMP_P (x))
                      {
                        do_rebuild_jump_labels = true;
                        break;
                      }
                }
            }

          if (insn == BB_HEAD (bb))
            break;
        }

      /* Some peepholes can decide they don't need one or more of their
         inputs.  If this happens, local life update is not enough.  */
      EXECUTE_IF_AND_COMPL_IN_BITMAP (bb->il.rtl->global_live_at_start, live,
                                      0, j, rsi)
        {
          do_global_life_update = true;
          break;
        }

      free_propagate_block_info (pbi);
    }

  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
    FREE_REG_SET (peep2_insn_data[i].live_before);
  FREE_REG_SET (live);

  if (do_rebuild_jump_labels)
    rebuild_jump_labels (get_insns ());

  /* If we eliminated EH edges, we may be able to merge blocks.  Further,
     we've changed global life since exception handlers are no longer
     reachable.  */
  if (do_cleanup_cfg)
    {
      cleanup_cfg (0);
      do_global_life_update = true;
    }
  if (do_global_life_update)
    update_life_info (0, UPDATE_LIFE_GLOBAL_RM_NOTES, PROP_DEATH_NOTES);
#ifdef HAVE_conditional_execution
  else
    {
      count_or_remove_death_notes (blocks, 1);
      update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
    }
  sbitmap_free (blocks);
#endif
}
#endif /* HAVE_peephole2 */
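
/* Illustrative example (hypothetical pattern, not from any particular
   target): a port providing HAVE_peephole2 might define

     (define_peephole2
       [(set (match_operand:SI 0 "register_operand" "")
             (const_int 0))
        (set (match_dup 0)
             (match_operand:SI 1 "register_operand" ""))]
       ""
       [(set (match_dup 0) (match_dup 1))])

   peephole2_optimize above would then replace the matched two-insn
   window with the single move and re-propagate life information over
   the new sequence.  */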

/* Common predicates for use with define_bypass.  */

/* True if the dependency between OUT_INSN and IN_INSN is on the store
   data, not the address operand(s) of the store.  IN_INSN must be
   single_set.  OUT_INSN must be either a single_set or a PARALLEL with
   SETs inside.  */

int
store_data_bypass_p (rtx out_insn, rtx in_insn)
{
  rtx out_set, in_set;

  in_set = single_set (in_insn);
  gcc_assert (in_set);

  if (!MEM_P (SET_DEST (in_set)))
    return false;

  out_set = single_set (out_insn);
  if (out_set)
    {
      if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
        return false;
    }
  else
    {
      rtx out_pat;
      int i;

      out_pat = PATTERN (out_insn);
      gcc_assert (GET_CODE (out_pat) == PARALLEL);

      for (i = 0; i < XVECLEN (out_pat, 0); i++)
        {
          rtx exp = XVECEXP (out_pat, 0, i);

          if (GET_CODE (exp) == CLOBBER)
            continue;

          gcc_assert (GET_CODE (exp) == SET);

          if (reg_mentioned_p (SET_DEST (exp), SET_DEST (in_set)))
            return false;
        }
    }

  return true;
}
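
/* Hypothetical example: with

     OUT_INSN:  (set (reg:SI 1) (plus:SI (reg:SI 2) (reg:SI 3)))
     IN_INSN:   (set (mem:SI (reg:SI 4)) (reg:SI 1))

   the function returns true, since reg 1 is not mentioned in the store
   address.  If IN_INSN were (set (mem:SI (reg:SI 1)) (reg:SI 5)), it
   would return false, because reg 1 feeds the address.  A machine
   description might use it as a define_bypass guard, e.g. (made-up
   reservation names):

     (define_bypass 1 "alu_insn" "store_insn" "store_data_bypass_p")  */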

/* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
   condition, and not the THEN or ELSE branch.  OUT_INSN may be either a single
   or multiple set; IN_INSN should be single_set for truth, but for convenience
   of insn categorization may be any JUMP or CALL insn.  */

int
if_test_bypass_p (rtx out_insn, rtx in_insn)
{
  rtx out_set, in_set;

  in_set = single_set (in_insn);
  if (! in_set)
    {
      gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
      return false;
    }

  if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
    return false;
  in_set = SET_SRC (in_set);

  out_set = single_set (out_insn);
  if (out_set)
    {
      if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
          || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
        return false;
    }
  else
    {
      rtx out_pat;
      int i;

      out_pat = PATTERN (out_insn);
      gcc_assert (GET_CODE (out_pat) == PARALLEL);

      for (i = 0; i < XVECLEN (out_pat, 0); i++)
        {
          rtx exp = XVECEXP (out_pat, 0, i);

          if (GET_CODE (exp) == CLOBBER)
            continue;

          gcc_assert (GET_CODE (exp) == SET);

          if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
              || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
            return false;
        }
    }

  return true;
}
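
/* Hypothetical example: with

     OUT_INSN:  (set (reg:CC 0) (compare:CC (reg:SI 1) (reg:SI 2)))
     IN_INSN:   (set (reg:SI 3)
                     (if_then_else:SI (ne (reg:CC 0) (const_int 0))
                                      (reg:SI 4) (reg:SI 5)))

   the function returns true: reg 0 appears only in the condition, not
   in the THEN/ELSE arms.  If OUT_INSN set reg 4 or reg 5 instead, it
   would return false.  Typical use is as a define_bypass guard for
   conditional-move consumers, e.g. (made-up reservation names):

     (define_bypass 1 "compare_insn" "cmove_insn" "if_test_bypass_p")  */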

static bool
gate_handle_peephole2 (void)
{
  return (optimize > 0 && flag_peephole2);
}
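
/* flag_peephole2 corresponds to -fpeephole2, which is enabled by
   default at -O2 and higher, so the gate above normally passes for
   optimized compilations on targets providing define_peephole2.  */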

static void
rest_of_handle_peephole2 (void)
{
#ifdef HAVE_peephole2
  peephole2_optimize (dump_file);
#endif
}

struct tree_opt_pass pass_peephole2 =
{
  "peephole2",                          /* name */
  gate_handle_peephole2,                /* gate */
  rest_of_handle_peephole2,             /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_PEEPHOLE2,                         /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
  'z'                                   /* letter */
};
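
/* The trailing 'z' is the pass's RTL dump letter: -dz on the command
   line requests the peephole2 dump.  */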

static void
rest_of_handle_split_all_insns (void)
{
  split_all_insns (1);
}
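
/* The nonzero argument asks split_all_insns to update life information
   for the insns produced by splitting (the parameter is called
   upd_life in split_all_insns' definition).  */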

struct tree_opt_pass pass_split_all_insns =
{
  "split1",                             /* name */
  NULL,                                 /* gate */
  rest_of_handle_split_all_insns,       /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
  0                                     /* letter */
};

/* The placement of the splitting that we do for shorten_branches
   depends on whether regstack is used by the target or not.  */
static bool
gate_do_final_split (void)
{
#if defined (HAVE_ATTR_length) && !defined (STACK_REGS)
  return 1;
#else
  return 0;
#endif
}

struct tree_opt_pass pass_split_for_shorten_branches =
{
  "split3",                             /* name */
  gate_do_final_split,                  /* gate */
  split_all_insns_noflow,               /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_SHORTEN_BRANCH,                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
  0                                     /* letter */
};


static bool
gate_handle_split_before_regstack (void)
{
#if defined (HAVE_ATTR_length) && defined (STACK_REGS)
  /* If flow2 creates new instructions which need splitting, and
     scheduling after reload is not done, they might not be split
     until final, which does not allow splitting if HAVE_ATTR_length
     is defined.  */
# ifdef INSN_SCHEDULING
  return (optimize && !flag_schedule_insns_after_reload);
# else
  return (optimize);
# endif
#else
  return 0;
#endif
}

struct tree_opt_pass pass_split_before_regstack =
{
  "split2",                             /* name */
  gate_handle_split_before_regstack,    /* gate */
  rest_of_handle_split_all_insns,       /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_SHORTEN_BRANCH,                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
  0                                     /* letter */
};