OpenCores
URL https://opencores.org/ocsvn/openrisc_me/openrisc_me/trunk

Subversion Repositories openrisc_me

[/] [openrisc/] [trunk/] [gnu-src/] [gcc-4.5.1/] [gcc/] [jump.c] - Blame information for rev 297

Go to most recent revision | Details | Compare with Previous | View Log

Line No. Rev Author Line
1 280 jeremybenn
/* Optimize jump instructions, for GNU compiler.
2
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997
3
   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2007, 2008, 2009
4
   Free Software Foundation, Inc.
5
 
6
This file is part of GCC.
7
 
8
GCC is free software; you can redistribute it and/or modify it under
9
the terms of the GNU General Public License as published by the Free
10
Software Foundation; either version 3, or (at your option) any later
11
version.
12
 
13
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14
WARRANTY; without even the implied warranty of MERCHANTABILITY or
15
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
16
for more details.
17
 
18
You should have received a copy of the GNU General Public License
19
along with GCC; see the file COPYING3.  If not see
20
<http://www.gnu.org/licenses/>.  */
21
 
22
/* This is the pathetic reminder of old fame of the jump-optimization pass
23
   of the compiler.  Now it contains basically a set of utility functions to
24
   operate with jumps.
25
 
26
   Each CODE_LABEL has a count of the times it is used
27
   stored in the LABEL_NUSES internal field, and each JUMP_INSN
28
   has one label that it refers to stored in the
29
   JUMP_LABEL internal field.  With this we can detect labels that
30
   become unused because of the deletion of all the jumps that
31
   formerly used them.  The JUMP_LABEL info is sometimes looked
32
   at by later passes.
33
 
34
   The subroutines redirect_jump and invert_jump are used
35
   from other passes as well.  */
36
 
37
#include "config.h"
38
#include "system.h"
39
#include "coretypes.h"
40
#include "tm.h"
41
#include "rtl.h"
42
#include "tm_p.h"
43
#include "flags.h"
44
#include "hard-reg-set.h"
45
#include "regs.h"
46
#include "insn-config.h"
47
#include "insn-attr.h"
48
#include "recog.h"
49
#include "function.h"
50
#include "expr.h"
51
#include "real.h"
52
#include "except.h"
53
#include "diagnostic.h"
54
#include "toplev.h"
55
#include "reload.h"
56
#include "predict.h"
57
#include "timevar.h"
58
#include "tree-pass.h"
59
#include "target.h"
60
 
61
/* Optimize jump y; x: ... y: jumpif... x?
62
   Don't know if it is worth bothering with.  */
63
/* Optimize two cases of conditional jump to conditional jump?
64
   This can never delete any instruction or make anything dead,
65
   or even change what is live at any point.
66
   So perhaps let combiner do it.  */
67
 
68
static void init_label_info (rtx);
69
static void mark_all_labels (rtx);
70
static void mark_jump_label_1 (rtx, rtx, bool, bool);
71
static void mark_jump_label_asm (rtx, rtx);
72
static void redirect_exp_1 (rtx *, rtx, rtx, rtx);
73
static int invert_exp_1 (rtx, rtx);
74
static int returnjump_p_1 (rtx *, void *);
75
 
76
/* This function rebuilds the JUMP_LABEL field and REG_LABEL_TARGET
   notes in jumping insns and REG_LABEL_OPERAND notes in non-jumping
   instructions and jumping insns that have labels as operands
   (e.g. cbranchsi4).

   F is the first insn of the chain to process.  */
void
rebuild_jump_labels (rtx f)
{
  rtx insn;

  timevar_push (TV_REBUILD_JUMP);
  /* First clear stale label-use info, then recompute it by walking
     every insn pattern.  */
  init_label_info (f);
  mark_all_labels (f);

  /* Keep track of labels used from static data; we don't track them
     closely enough to delete them here, so make sure their reference
     count doesn't drop to zero.  */

  /* forced_labels is a list chained through XEXP (x, 1), with the
     label itself in XEXP (x, 0).  */
  for (insn = forced_labels; insn; insn = XEXP (insn, 1))
    if (LABEL_P (XEXP (insn, 0)))
      LABEL_NUSES (XEXP (insn, 0))++;
  timevar_pop (TV_REBUILD_JUMP);
}
98
 
99
/* Some old code expects exactly one BARRIER as the NEXT_INSN of a
   non-fallthru insn.  This is not generally true, as multiple barriers
   may have crept in, or the BARRIER may be separated from the last
   real insn by one or more NOTEs.

   This simple pass moves barriers and removes duplicates so that the
   old code is happy.

   Always returns 0 (no TODO flags).  */
unsigned int
cleanup_barriers (void)
{
  rtx insn, next, prev;
  for (insn = get_insns (); insn; insn = next)
    {
      /* Fetch NEXT up front: INSN may be deleted or moved below.  */
      next = NEXT_INSN (insn);
      if (BARRIER_P (insn))
        {
          prev = prev_nonnote_insn (insn);
          if (!prev)
            continue;
          if (BARRIER_P (prev))
            /* Two barriers in a row: the second is redundant.  */
            delete_insn (insn);
          else if (prev != PREV_INSN (insn))
            /* Only NOTEs separate the barrier from the previous real
               insn; move the barrier to directly follow it.  */
            reorder_insns (insn, insn, prev);
        }
    }
  return 0;
}
127
 
128
/* Pass descriptor registering cleanup_barriers as the "barriers"
   RTL pass.  No gate: the pass always runs when scheduled.  */
struct rtl_opt_pass pass_cleanup_barriers =
{
 {
  RTL_PASS,
  "barriers",                           /* name */
  NULL,                                 /* gate */
  cleanup_barriers,                     /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func                        /* todo_flags_finish */
 }
};
146
 
147
 
148
/* Initialize LABEL_NUSES and JUMP_LABEL fields, add REG_LABEL_TARGET
   for remaining targets for JUMP_P.  Delete any REG_LABEL_OPERAND
   notes whose labels don't occur in the insn any more.

   F is the first insn of the chain to process.  */

static void
init_label_info (rtx f)
{
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      /* Reset each label's use count; preserved labels start at 1 so
         they are never deleted as unused.  */
      if (LABEL_P (insn))
        LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);

      /* REG_LABEL_TARGET notes (including the JUMP_LABEL field) are
         sticky and not reset here; that way we won't lose association
         with a label when e.g. the source for a target register
         disappears out of reach for targets that may use jump-target
         registers.  Jump transformations are supposed to transform
         any REG_LABEL_TARGET notes.  The target label reference in a
         branch may disappear from the branch (and from the
         instruction before it) for other reasons, like register
         allocation.  */

      if (INSN_P (insn))
        {
          rtx note, next;

          for (note = REG_NOTES (insn); note; note = next)
            {
              /* Grab the chain link first: remove_note unlinks NOTE.  */
              next = XEXP (note, 1);
              if (REG_NOTE_KIND (note) == REG_LABEL_OPERAND
                  && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
                remove_note (insn, note);
            }
        }
    }
}
186
 
187
/* Mark the label each jump jumps to.
   Combine consecutive labels, and count uses of labels.

   F is the first insn of the chain.  PREV_NONJUMP_INSN tracks the most
   recent non-deleted non-jump insn, reset at each CODE_LABEL, so we can
   recognize the "load label into register, then jump through it"
   idiom.  */

static void
mark_all_labels (rtx f)
{
  rtx insn;
  rtx prev_nonjump_insn = NULL;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
        mark_jump_label (PATTERN (insn), insn, 0);

        /* If the previous non-jump insn sets something to a label,
           something that this jump insn uses, make that label the primary
           target of this insn if we don't yet have any.  That previous
           insn must be a single_set and not refer to more than one label.
           The jump insn must not refer to other labels as jump targets
           and must be a plain (set (pc) ...), maybe in a parallel, and
           may refer to the item being set only directly or as one of the
           arms in an IF_THEN_ELSE.  */
        if (! INSN_DELETED_P (insn)
            && JUMP_P (insn)
            && JUMP_LABEL (insn) == NULL)
          {
            rtx label_note = NULL;
            rtx pc = pc_set (insn);
            rtx pc_src = pc != NULL ? SET_SRC (pc) : NULL;

            if (prev_nonjump_insn != NULL)
              label_note
                = find_reg_note (prev_nonjump_insn, REG_LABEL_OPERAND, NULL);

            if (label_note != NULL && pc_src != NULL)
              {
                rtx label_set = single_set (prev_nonjump_insn);
                rtx label_dest
                  = label_set != NULL ? SET_DEST (label_set) : NULL;

                if (label_set != NULL
                    /* The source must be the direct LABEL_REF, not a
                       PLUS, UNSPEC, IF_THEN_ELSE etc.  */
                    && GET_CODE (SET_SRC (label_set)) == LABEL_REF
                    /* The destination of the previous insn's set must
                       be what this jump branches through: either the
                       whole PC source or one IF_THEN_ELSE arm.  */
                    && (rtx_equal_p (label_dest, pc_src)
                        || (GET_CODE (pc_src) == IF_THEN_ELSE
                            && (rtx_equal_p (label_dest, XEXP (pc_src, 1))
                                || rtx_equal_p (label_dest,
                                                XEXP (pc_src, 2))))))

                  {
                    /* The CODE_LABEL referred to in the note must be the
                       CODE_LABEL in the LABEL_REF of the "set".  We can
                       conveniently use it for the marker function, which
                       requires a LABEL_REF wrapping.  */
                    gcc_assert (XEXP (label_note, 0)
                                == XEXP (SET_SRC (label_set), 0));

                    mark_jump_label_1 (label_set, insn, false, true);
                    gcc_assert (JUMP_LABEL (insn)
                                == XEXP (SET_SRC (label_set), 0));
                  }
              }
          }
        else if (! INSN_DELETED_P (insn))
          prev_nonjump_insn = insn;
      }
    else if (LABEL_P (insn))
      prev_nonjump_insn = NULL;

  /* If we are in cfglayout mode, there may be non-insns between the
     basic blocks.  If those non-insns represent tablejump data, they
     contain label references that we must record.  */
  if (current_ir_type () == IR_RTL_CFGLAYOUT)
    {
      basic_block bb;
      rtx insn;
      FOR_EACH_BB (bb)
        {
          /* Scan the out-of-line header and footer insn chains kept in
             the basic-block RTL info.  */
          for (insn = bb->il.rtl->header; insn; insn = NEXT_INSN (insn))
            if (INSN_P (insn))
              {
                gcc_assert (JUMP_TABLE_DATA_P (insn));
                mark_jump_label (PATTERN (insn), insn, 0);
              }

          for (insn = bb->il.rtl->footer; insn; insn = NEXT_INSN (insn))
            if (INSN_P (insn))
              {
                gcc_assert (JUMP_TABLE_DATA_P (insn));
                mark_jump_label (PATTERN (insn), insn, 0);
              }
        }
    }
}
282
 
283
/* Given a comparison (CODE ARG0 ARG1), inside an insn, INSN, return a code
   of reversed comparison if it is possible to do so.  Otherwise return UNKNOWN.
   UNKNOWN may be returned in case we are having CC_MODE compare and we don't
   know whether its source is floating point or integer comparison.  Machine
   description should define REVERSIBLE_CC_MODE and REVERSE_CONDITION macros
   to help this function avoid overhead in these cases.  */
enum rtx_code
reversed_comparison_code_parts (enum rtx_code code, const_rtx arg0,
                                const_rtx arg1, const_rtx insn)
{
  enum machine_mode mode;

  /* If this is not actually a comparison, we can't reverse it.  */
  if (GET_RTX_CLASS (code) != RTX_COMPARE
      && GET_RTX_CLASS (code) != RTX_COMM_COMPARE)
    return UNKNOWN;

  /* Take the mode from whichever operand has one.  */
  mode = GET_MODE (arg0);
  if (mode == VOIDmode)
    mode = GET_MODE (arg1);

  /* First see if machine description supplies us way to reverse the
     comparison.  Give it priority over everything else to allow
     machine description to do tricks.  */
  if (GET_MODE_CLASS (mode) == MODE_CC
      && REVERSIBLE_CC_MODE (mode))
    {
#ifdef REVERSE_CONDITION
      return REVERSE_CONDITION (code, mode);
#endif
      return reverse_condition (code);
    }

  /* Try a few special cases based on the comparison code.  */
  switch (code)
    {
    case GEU:
    case GTU:
    case LEU:
    case LTU:
    case NE:
    case EQ:
      /* It is always safe to reverse EQ and NE, even for the floating
         point.  Similarly the unsigned comparisons are never used for
         floating point so we can reverse them in the default way.  */
      return reverse_condition (code);
    case ORDERED:
    case UNORDERED:
    case LTGT:
    case UNEQ:
      /* In case we already see unordered comparison, we can be sure to
         be dealing with floating point so we don't need any more tests.  */
      return reverse_condition_maybe_unordered (code);
    case UNLT:
    case UNLE:
    case UNGT:
    case UNGE:
      /* We don't have safe way to reverse these yet.  */
      return UNKNOWN;
    default:
      break;
    }

  if (GET_MODE_CLASS (mode) == MODE_CC || CC0_P (arg0))
    {
      const_rtx prev;
      /* Try to search for the comparison to determine the real mode.
         This code is expensive, but with sane machine description it
         will be never used, since REVERSIBLE_CC_MODE will return true
         in all cases.  */
      if (! insn)
        return UNKNOWN;

      /* These CONST_CAST's are okay because prev_nonnote_insn just
         returns its argument and we assign it to a const_rtx
         variable.  */
      /* Walk backwards to the insn that set ARG0, stopping at any
         label (unknown control flow).  */
      for (prev = prev_nonnote_insn (CONST_CAST_RTX(insn));
           prev != 0 && !LABEL_P (prev);
           prev = prev_nonnote_insn (CONST_CAST_RTX(prev)))
        {
          const_rtx set = set_of (arg0, prev);
          if (set && GET_CODE (set) == SET
              && rtx_equal_p (SET_DEST (set), arg0))
            {
              rtx src = SET_SRC (set);

              if (GET_CODE (src) == COMPARE)
                {
                  /* Found the real comparison; recover the operand
                     mode from its operands.  */
                  rtx comparison = src;
                  arg0 = XEXP (src, 0);
                  mode = GET_MODE (arg0);
                  if (mode == VOIDmode)
                    mode = GET_MODE (XEXP (comparison, 1));
                  break;
                }
              /* We can get past reg-reg moves.  This may be useful for model
                 of i387 comparisons that first move flag registers around.  */
              if (REG_P (src))
                {
                  arg0 = src;
                  continue;
                }
            }
          /* If register is clobbered in some ununderstandable way,
             give up.  */
          if (set)
            return UNKNOWN;
        }
    }

  /* Test for an integer condition, or a floating-point comparison
     in which NaNs can be ignored.  */
  if (CONST_INT_P (arg0)
      || (GET_MODE (arg0) != VOIDmode
          && GET_MODE_CLASS (mode) != MODE_CC
          && !HONOR_NANS (mode)))
    return reverse_condition (code);

  return UNKNOWN;
}
403
 
404
/* A wrapper around the previous function to take COMPARISON as rtx
405
   expression.  This simplifies many callers.  */
406
enum rtx_code
407
reversed_comparison_code (const_rtx comparison, const_rtx insn)
408
{
409
  if (!COMPARISON_P (comparison))
410
    return UNKNOWN;
411
  return reversed_comparison_code_parts (GET_CODE (comparison),
412
                                         XEXP (comparison, 0),
413
                                         XEXP (comparison, 1), insn);
414
}
415
 
416
/* Return comparison with reversed code of EXP.
417
   Return NULL_RTX in case we fail to do the reversal.  */
418
rtx
419
reversed_comparison (const_rtx exp, enum machine_mode mode)
420
{
421
  enum rtx_code reversed_code = reversed_comparison_code (exp, NULL_RTX);
422
  if (reversed_code == UNKNOWN)
423
    return NULL_RTX;
424
  else
425
    return simplify_gen_relational (reversed_code, mode, VOIDmode,
426
                                    XEXP (exp, 0), XEXP (exp, 1));
427
}
428
 
429
 
430
/* Given an rtx-code for a comparison, return the code for the negated
431
   comparison.  If no such code exists, return UNKNOWN.
432
 
433
   WATCH OUT!  reverse_condition is not safe to use on a jump that might
434
   be acting on the results of an IEEE floating point comparison, because
435
   of the special treatment of non-signaling nans in comparisons.
436
   Use reversed_comparison_code instead.  */
437
 
438
enum rtx_code
439
reverse_condition (enum rtx_code code)
440
{
441
  switch (code)
442
    {
443
    case EQ:
444
      return NE;
445
    case NE:
446
      return EQ;
447
    case GT:
448
      return LE;
449
    case GE:
450
      return LT;
451
    case LT:
452
      return GE;
453
    case LE:
454
      return GT;
455
    case GTU:
456
      return LEU;
457
    case GEU:
458
      return LTU;
459
    case LTU:
460
      return GEU;
461
    case LEU:
462
      return GTU;
463
    case UNORDERED:
464
      return ORDERED;
465
    case ORDERED:
466
      return UNORDERED;
467
 
468
    case UNLT:
469
    case UNLE:
470
    case UNGT:
471
    case UNGE:
472
    case UNEQ:
473
    case LTGT:
474
      return UNKNOWN;
475
 
476
    default:
477
      gcc_unreachable ();
478
    }
479
}
480
 
481
/* Similar, but we're allowed to generate unordered comparisons, which
482
   makes it safe for IEEE floating-point.  Of course, we have to recognize
483
   that the target will support them too...  */
484
 
485
enum rtx_code
486
reverse_condition_maybe_unordered (enum rtx_code code)
487
{
488
  switch (code)
489
    {
490
    case EQ:
491
      return NE;
492
    case NE:
493
      return EQ;
494
    case GT:
495
      return UNLE;
496
    case GE:
497
      return UNLT;
498
    case LT:
499
      return UNGE;
500
    case LE:
501
      return UNGT;
502
    case LTGT:
503
      return UNEQ;
504
    case UNORDERED:
505
      return ORDERED;
506
    case ORDERED:
507
      return UNORDERED;
508
    case UNLT:
509
      return GE;
510
    case UNLE:
511
      return GT;
512
    case UNGT:
513
      return LE;
514
    case UNGE:
515
      return LT;
516
    case UNEQ:
517
      return LTGT;
518
 
519
    default:
520
      gcc_unreachable ();
521
    }
522
}
523
 
524
/* Similar, but return the code when two operands of a comparison are swapped.
525
   This IS safe for IEEE floating-point.  */
526
 
527
enum rtx_code
528
swap_condition (enum rtx_code code)
529
{
530
  switch (code)
531
    {
532
    case EQ:
533
    case NE:
534
    case UNORDERED:
535
    case ORDERED:
536
    case UNEQ:
537
    case LTGT:
538
      return code;
539
 
540
    case GT:
541
      return LT;
542
    case GE:
543
      return LE;
544
    case LT:
545
      return GT;
546
    case LE:
547
      return GE;
548
    case GTU:
549
      return LTU;
550
    case GEU:
551
      return LEU;
552
    case LTU:
553
      return GTU;
554
    case LEU:
555
      return GEU;
556
    case UNLT:
557
      return UNGT;
558
    case UNLE:
559
      return UNGE;
560
    case UNGT:
561
      return UNLT;
562
    case UNGE:
563
      return UNLE;
564
 
565
    default:
566
      gcc_unreachable ();
567
    }
568
}
569
 
570
/* Given a comparison CODE, return the corresponding unsigned comparison.
571
   If CODE is an equality comparison or already an unsigned comparison,
572
   CODE is returned.  */
573
 
574
enum rtx_code
575
unsigned_condition (enum rtx_code code)
576
{
577
  switch (code)
578
    {
579
    case EQ:
580
    case NE:
581
    case GTU:
582
    case GEU:
583
    case LTU:
584
    case LEU:
585
      return code;
586
 
587
    case GT:
588
      return GTU;
589
    case GE:
590
      return GEU;
591
    case LT:
592
      return LTU;
593
    case LE:
594
      return LEU;
595
 
596
    default:
597
      gcc_unreachable ();
598
    }
599
}
600
 
601
/* Similarly, return the signed version of a comparison.  */
602
 
603
enum rtx_code
604
signed_condition (enum rtx_code code)
605
{
606
  switch (code)
607
    {
608
    case EQ:
609
    case NE:
610
    case GT:
611
    case GE:
612
    case LT:
613
    case LE:
614
      return code;
615
 
616
    case GTU:
617
      return GT;
618
    case GEU:
619
      return GE;
620
    case LTU:
621
      return LT;
622
    case LEU:
623
      return LE;
624
 
625
    default:
626
      gcc_unreachable ();
627
    }
628
}
629
 
630
/* Return nonzero if CODE1 is more strict than CODE2, i.e., if the
631
   truth of CODE1 implies the truth of CODE2.  */
632
 
633
int
634
comparison_dominates_p (enum rtx_code code1, enum rtx_code code2)
635
{
636
  /* UNKNOWN comparison codes can happen as a result of trying to revert
637
     comparison codes.
638
     They can't match anything, so we have to reject them here.  */
639
  if (code1 == UNKNOWN || code2 == UNKNOWN)
640
    return 0;
641
 
642
  if (code1 == code2)
643
    return 1;
644
 
645
  switch (code1)
646
    {
647
    case UNEQ:
648
      if (code2 == UNLE || code2 == UNGE)
649
        return 1;
650
      break;
651
 
652
    case EQ:
653
      if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
654
          || code2 == ORDERED)
655
        return 1;
656
      break;
657
 
658
    case UNLT:
659
      if (code2 == UNLE || code2 == NE)
660
        return 1;
661
      break;
662
 
663
    case LT:
664
      if (code2 == LE || code2 == NE || code2 == ORDERED || code2 == LTGT)
665
        return 1;
666
      break;
667
 
668
    case UNGT:
669
      if (code2 == UNGE || code2 == NE)
670
        return 1;
671
      break;
672
 
673
    case GT:
674
      if (code2 == GE || code2 == NE || code2 == ORDERED || code2 == LTGT)
675
        return 1;
676
      break;
677
 
678
    case GE:
679
    case LE:
680
      if (code2 == ORDERED)
681
        return 1;
682
      break;
683
 
684
    case LTGT:
685
      if (code2 == NE || code2 == ORDERED)
686
        return 1;
687
      break;
688
 
689
    case LTU:
690
      if (code2 == LEU || code2 == NE)
691
        return 1;
692
      break;
693
 
694
    case GTU:
695
      if (code2 == GEU || code2 == NE)
696
        return 1;
697
      break;
698
 
699
    case UNORDERED:
700
      if (code2 == NE || code2 == UNEQ || code2 == UNLE || code2 == UNLT
701
          || code2 == UNGE || code2 == UNGT)
702
        return 1;
703
      break;
704
 
705
    default:
706
      break;
707
    }
708
 
709
  return 0;
710
}
711
 
712
/* Return 1 if INSN is an unconditional jump and nothing else.  */
713
 
714
int
715
simplejump_p (const_rtx insn)
716
{
717
  return (JUMP_P (insn)
718
          && GET_CODE (PATTERN (insn)) == SET
719
          && GET_CODE (SET_DEST (PATTERN (insn))) == PC
720
          && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
721
}
722
 
723
/* Return nonzero if INSN is a (possibly) conditional jump
724
   and nothing more.
725
 
726
   Use of this function is deprecated, since we need to support combined
727
   branch and compare insns.  Use any_condjump_p instead whenever possible.  */
728
 
729
int
730
condjump_p (const_rtx insn)
731
{
732
  const_rtx x = PATTERN (insn);
733
 
734
  if (GET_CODE (x) != SET
735
      || GET_CODE (SET_DEST (x)) != PC)
736
    return 0;
737
 
738
  x = SET_SRC (x);
739
  if (GET_CODE (x) == LABEL_REF)
740
    return 1;
741
  else
742
    return (GET_CODE (x) == IF_THEN_ELSE
743
            && ((GET_CODE (XEXP (x, 2)) == PC
744
                 && (GET_CODE (XEXP (x, 1)) == LABEL_REF
745
                     || GET_CODE (XEXP (x, 1)) == RETURN))
746
                || (GET_CODE (XEXP (x, 1)) == PC
747
                    && (GET_CODE (XEXP (x, 2)) == LABEL_REF
748
                        || GET_CODE (XEXP (x, 2)) == RETURN))));
749
}
750
 
751
/* Return nonzero if INSN is a (possibly) conditional jump inside a
752
   PARALLEL.
753
 
754
   Use this function is deprecated, since we need to support combined
755
   branch and compare insns.  Use any_condjump_p instead whenever possible.  */
756
 
757
int
758
condjump_in_parallel_p (const_rtx insn)
759
{
760
  const_rtx x = PATTERN (insn);
761
 
762
  if (GET_CODE (x) != PARALLEL)
763
    return 0;
764
  else
765
    x = XVECEXP (x, 0, 0);
766
 
767
  if (GET_CODE (x) != SET)
768
    return 0;
769
  if (GET_CODE (SET_DEST (x)) != PC)
770
    return 0;
771
  if (GET_CODE (SET_SRC (x)) == LABEL_REF)
772
    return 1;
773
  if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
774
    return 0;
775
  if (XEXP (SET_SRC (x), 2) == pc_rtx
776
      && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
777
          || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
778
    return 1;
779
  if (XEXP (SET_SRC (x), 1) == pc_rtx
780
      && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
781
          || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
782
    return 1;
783
  return 0;
784
}
785
 
786
/* Return set of PC, otherwise NULL.  */
787
 
788
rtx
789
pc_set (const_rtx insn)
790
{
791
  rtx pat;
792
  if (!JUMP_P (insn))
793
    return NULL_RTX;
794
  pat = PATTERN (insn);
795
 
796
  /* The set is allowed to appear either as the insn pattern or
797
     the first set in a PARALLEL.  */
798
  if (GET_CODE (pat) == PARALLEL)
799
    pat = XVECEXP (pat, 0, 0);
800
  if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == PC)
801
    return pat;
802
 
803
  return NULL_RTX;
804
}
805
 
806
/* Return true when insn is an unconditional direct jump,
807
   possibly bundled inside a PARALLEL.  */
808
 
809
int
810
any_uncondjump_p (const_rtx insn)
811
{
812
  const_rtx x = pc_set (insn);
813
  if (!x)
814
    return 0;
815
  if (GET_CODE (SET_SRC (x)) != LABEL_REF)
816
    return 0;
817
  if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
818
    return 0;
819
  return 1;
820
}
821
 
822
/* Return true when insn is a conditional jump.  This function works for
823
   instructions containing PC sets in PARALLELs.  The instruction may have
824
   various other effects so before removing the jump you must verify
825
   onlyjump_p.
826
 
827
   Note that unlike condjump_p it returns false for unconditional jumps.  */
828
 
829
int
830
any_condjump_p (const_rtx insn)
831
{
832
  const_rtx x = pc_set (insn);
833
  enum rtx_code a, b;
834
 
835
  if (!x)
836
    return 0;
837
  if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
838
    return 0;
839
 
840
  a = GET_CODE (XEXP (SET_SRC (x), 1));
841
  b = GET_CODE (XEXP (SET_SRC (x), 2));
842
 
843
  return ((b == PC && (a == LABEL_REF || a == RETURN))
844
          || (a == PC && (b == LABEL_REF || b == RETURN)));
845
}
846
 
847
/* Return the label of a conditional jump.  */
848
 
849
rtx
850
condjump_label (const_rtx insn)
851
{
852
  rtx x = pc_set (insn);
853
 
854
  if (!x)
855
    return NULL_RTX;
856
  x = SET_SRC (x);
857
  if (GET_CODE (x) == LABEL_REF)
858
    return x;
859
  if (GET_CODE (x) != IF_THEN_ELSE)
860
    return NULL_RTX;
861
  if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
862
    return XEXP (x, 1);
863
  if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
864
    return XEXP (x, 2);
865
  return NULL_RTX;
866
}
867
 
868
/* Return true if INSN is a (possibly conditional) return insn.  */
869
 
870
static int
871
returnjump_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
872
{
873
  rtx x = *loc;
874
 
875
  if (x == NULL)
876
    return false;
877
 
878
  switch (GET_CODE (x))
879
    {
880
    case RETURN:
881
    case EH_RETURN:
882
      return true;
883
 
884
    case SET:
885
      return SET_IS_RETURN_P (x);
886
 
887
    default:
888
      return false;
889
    }
890
}
891
 
892
/* Return TRUE if INSN is a return jump.  Scans the whole pattern;
   returnjump_p_1 recognizes RETURN, EH_RETURN and SETs marked with
   SET_IS_RETURN_P.  */

int
returnjump_p (rtx insn)
{
  if (!JUMP_P (insn))
    return 0;
  return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
}
901
 
902
/* Return true if INSN is a (possibly conditional) return insn.  */
903
 
904
static int
905
eh_returnjump_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
906
{
907
  return *loc && GET_CODE (*loc) == EH_RETURN;
908
}
909
 
910
/* Return TRUE if INSN is a jump containing an EH_RETURN rtx
   anywhere in its pattern.  */
int
eh_returnjump_p (rtx insn)
{
  if (!JUMP_P (insn))
    return 0;
  return for_each_rtx (&PATTERN (insn), eh_returnjump_p_1, NULL);
}
917
 
918
/* Return true if INSN is a jump that only transfers control and
919
   nothing more.  */
920
 
921
int
922
onlyjump_p (const_rtx insn)
923
{
924
  rtx set;
925
 
926
  if (!JUMP_P (insn))
927
    return 0;
928
 
929
  set = single_set (insn);
930
  if (set == NULL)
931
    return 0;
932
  if (GET_CODE (SET_DEST (set)) != PC)
933
    return 0;
934
  if (side_effects_p (SET_SRC (set)))
935
    return 0;
936
 
937
  return 1;
938
}
939
 
940
#ifdef HAVE_cc0
941
 
942
/* Return nonzero if X is an RTX that only sets the condition codes
943
   and has no side effects.  */
944
 
945
int
946
only_sets_cc0_p (const_rtx x)
947
{
948
  if (! x)
949
    return 0;
950
 
951
  if (INSN_P (x))
952
    x = PATTERN (x);
953
 
954
  return sets_cc0_p (x) == 1 && ! side_effects_p (x);
955
}
956
 
957
/* Return 1 if X is an RTX that does nothing but set the condition codes
958
   and CLOBBER or USE registers.
959
   Return -1 if X does explicitly set the condition codes,
960
   but also does other things.  */
961
 
962
int
963
sets_cc0_p (const_rtx x)
964
{
965
  if (! x)
966
    return 0;
967
 
968
  if (INSN_P (x))
969
    x = PATTERN (x);
970
 
971
  if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
972
    return 1;
973
  if (GET_CODE (x) == PARALLEL)
974
    {
975
      int i;
976
      int sets_cc0 = 0;
977
      int other_things = 0;
978
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
979
        {
980
          if (GET_CODE (XVECEXP (x, 0, i)) == SET
981
              && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
982
            sets_cc0 = 1;
983
          else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
984
            other_things = 1;
985
        }
986
      return ! sets_cc0 ? 0 : other_things ? -1 : 1;
987
    }
988
  return 0;
989
}
990
#endif
991
 
992
/* Find all CODE_LABELs referred to in X, and increment their use
993
   counts.  If INSN is a JUMP_INSN and there is at least one
994
   CODE_LABEL referenced in INSN as a jump target, then store the last
995
   one in JUMP_LABEL (INSN).  For a tablejump, this must be the label
996
   for the ADDR_VEC.  Store any other jump targets as REG_LABEL_TARGET
997
   notes.  If INSN is an INSN or a CALL_INSN or non-target operands of
998
   a JUMP_INSN, and there is at least one CODE_LABEL referenced in
999
   INSN, add a REG_LABEL_OPERAND note containing that label to INSN.
1000
 
1001
   Note that two labels separated by a loop-beginning note
1002
   must be kept distinct if we have not yet done loop-optimization,
1003
   because the gap between them is where loop-optimize
1004
   will want to move invariant code to.  CROSS_JUMP tells us
1005
   that loop-optimization is done with.  */
1006
 
1007
void
1008
mark_jump_label (rtx x, rtx insn, int in_mem)
1009
{
1010
  rtx asmop = extract_asm_operands (x);
1011
  if (asmop)
1012
    mark_jump_label_asm (asmop, insn);
1013
  else
1014
    mark_jump_label_1 (x, insn, in_mem != 0,
1015
                       (insn != NULL && x == PATTERN (insn) && JUMP_P (insn)));
1016
}
1017
 
1018
/* Worker function for mark_jump_label.  IN_MEM is TRUE when X occurs
   within a (MEM ...).  IS_TARGET is TRUE when X is to be treated as a
   jump-target; when the JUMP_LABEL field of INSN should be set or a
   REG_LABEL_TARGET note should be added, not a REG_LABEL_OPERAND
   note.

   Recursively walks X, bumping LABEL_NUSES on every CODE_LABEL found
   and recording each label either in JUMP_LABEL (INSN) or as a
   REG_LABEL_TARGET / REG_LABEL_OPERAND note on INSN.  */

static void
mark_jump_label_1 (rtx x, rtx insn, bool in_mem, bool is_target)
{
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  switch (code)
    {
    /* These codes can never contain a label reference; stop here.  */
    case PC:
    case CC0:
    case REG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CLOBBER:
    case CALL:
      return;

    case MEM:
      in_mem = true;
      break;

    case SEQUENCE:
      /* Each element of a SEQUENCE is itself an insn; mark each one
         with its own pattern as the context, not INSN.  */
      for (i = 0; i < XVECLEN (x, 0); i++)
        mark_jump_label (PATTERN (XVECEXP (x, 0, i)),
                         XVECEXP (x, 0, i), 0);
      return;

    case SYMBOL_REF:
      /* SYMBOL_REFs are only interesting inside a MEM, where they may
         address the constant pool.  */
      if (!in_mem)
        return;

      /* If this is a constant-pool reference, see if it is a label.  */
      if (CONSTANT_POOL_ADDRESS_P (x))
        mark_jump_label_1 (get_pool_constant (x), insn, in_mem, is_target);
      break;

      /* Handle operands in the condition of an if-then-else as for a
         non-jump insn.  */
    case IF_THEN_ELSE:
      if (!is_target)
        break;
      mark_jump_label_1 (XEXP (x, 0), insn, in_mem, false);
      mark_jump_label_1 (XEXP (x, 1), insn, in_mem, true);
      mark_jump_label_1 (XEXP (x, 2), insn, in_mem, true);
      return;

    case LABEL_REF:
      {
        rtx label = XEXP (x, 0);

        /* Ignore remaining references to unreachable labels that
           have been deleted.  */
        if (NOTE_P (label)
            && NOTE_KIND (label) == NOTE_INSN_DELETED_LABEL)
          break;

        gcc_assert (LABEL_P (label));

        /* Ignore references to labels of containing functions.  */
        if (LABEL_REF_NONLOCAL_P (x))
          break;

        /* No-op store: LABEL was just read from this slot.  Kept from
           older code that could canonicalize the label here.  */
        XEXP (x, 0) = label;
        if (! insn || ! INSN_DELETED_P (insn))
          ++LABEL_NUSES (label);

        if (insn)
          {
            if (is_target
                /* Do not change a previous setting of JUMP_LABEL.  If the
                   JUMP_LABEL slot is occupied by a different label,
                   create a note for this label.  */
                && (JUMP_LABEL (insn) == NULL || JUMP_LABEL (insn) == label))
              JUMP_LABEL (insn) = label;
            else
              {
                enum reg_note kind
                  = is_target ? REG_LABEL_TARGET : REG_LABEL_OPERAND;

                /* Add a REG_LABEL_OPERAND or REG_LABEL_TARGET note
                   for LABEL unless there already is one.  All uses of
                   a label, except for the primary target of a jump,
                   must have such a note.  */
                if (! find_reg_note (insn, kind, label))
                  add_reg_note (insn, kind, label);
              }
          }
        return;
      }

  /* Do walk the labels in a vector, but not the first operand of an
     ADDR_DIFF_VEC.  Don't set the JUMP_LABEL of a vector.  */
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      /* NOTE(review): INSN is dereferenced unconditionally here, so
         callers presumably never pass a vector with a null INSN —
         verify at call sites.  */
      if (! INSN_DELETED_P (insn))
        {
          int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;

          for (i = 0; i < XVECLEN (x, eltnum); i++)
            mark_jump_label_1 (XVECEXP (x, eltnum, i), NULL_RTX, in_mem,
                               is_target);
        }
      return;

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);

  /* The primary target of a tablejump is the label of the ADDR_VEC,
     which is canonically mentioned *last* in the insn.  To get it
     marked as JUMP_LABEL, we iterate over items in reverse order.  */
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        mark_jump_label_1 (XEXP (x, i), insn, in_mem, is_target);
      else if (fmt[i] == 'E')
        {
          int j;

          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            mark_jump_label_1 (XVECEXP (x, i, j), insn, in_mem,
                               is_target);
        }
    }
}
1152
 
1153
/* Worker function for mark_jump_label.  Handle asm insns specially.
1154
   In particular, output operands need not be considered so we can
1155
   avoid re-scanning the replicated asm_operand.  Also, the asm_labels
1156
   need to be considered targets.  */
1157
 
1158
static void
1159
mark_jump_label_asm (rtx asmop, rtx insn)
1160
{
1161
  int i;
1162
 
1163
  for (i = ASM_OPERANDS_INPUT_LENGTH (asmop) - 1; i >= 0; --i)
1164
    mark_jump_label_1 (ASM_OPERANDS_INPUT (asmop, i), insn, false, false);
1165
 
1166
  for (i = ASM_OPERANDS_LABEL_LENGTH (asmop) - 1; i >= 0; --i)
1167
    mark_jump_label_1 (ASM_OPERANDS_LABEL (asmop, i), insn, false, true);
1168
}
1169
 
1170
/* Delete insn INSN from the chain of insns and update label ref counts
   and delete insns now unreachable.

   Returns the first insn after INSN that was not deleted.

   Usage of this instruction is deprecated.  Use delete_insn instead and
   subsequent cfg_cleanup pass to delete unreachable code if needed.  */

rtx
delete_related_insns (rtx insn)
{
  int was_code_label = (LABEL_P (insn));
  rtx note;
  rtx next = NEXT_INSN (insn), prev = PREV_INSN (insn);

  /* Skip past insns that were already deleted so NEXT is live.  */
  while (next && INSN_DELETED_P (next))
    next = NEXT_INSN (next);

  /* This insn is already deleted => return first following nondeleted.  */
  if (INSN_DELETED_P (insn))
    return next;

  delete_insn (insn);

  /* If instruction is followed by a barrier,
     delete the barrier too.  */

  if (next != 0 && BARRIER_P (next))
    delete_insn (next);

  /* If deleting a jump, decrement the count of the label,
     and delete the label if it is now unused.  */

  if (JUMP_P (insn) && JUMP_LABEL (insn))
    {
      rtx lab = JUMP_LABEL (insn), lab_next;

      if (LABEL_NUSES (lab) == 0)
        /* This can delete NEXT or PREV,
           either directly if NEXT is JUMP_LABEL (INSN),
           or indirectly through more levels of jumps.  */
        delete_related_insns (lab);
      else if (tablejump_p (insn, NULL, &lab_next))
        {
          /* If we're deleting the tablejump, delete the dispatch table.
             We may not be able to kill the label immediately preceding
             just yet, as it might be referenced in code leading up to
             the tablejump.  */
          delete_related_insns (lab_next);
        }
    }

  /* Likewise if we're deleting a dispatch table.  */

  if (JUMP_TABLE_DATA_P (insn))
    {
      rtx pat = PATTERN (insn);
      /* ADDR_DIFF_VEC keeps its labels in operand 1; ADDR_VEC in 0.  */
      int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
      int len = XVECLEN (pat, diff_vec_p);

      for (i = 0; i < len; i++)
        if (LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
          delete_related_insns (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
      /* The recursive calls above may have deleted NEXT; re-skip.  */
      while (next && INSN_DELETED_P (next))
        next = NEXT_INSN (next);
      return next;
    }

  /* Likewise for any JUMP_P / INSN / CALL_INSN with a
     REG_LABEL_OPERAND or REG_LABEL_TARGET note.  */
  if (INSN_P (insn))
    for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
      if ((REG_NOTE_KIND (note) == REG_LABEL_OPERAND
           || REG_NOTE_KIND (note) == REG_LABEL_TARGET)
          /* This could also be a NOTE_INSN_DELETED_LABEL note.  */
          && LABEL_P (XEXP (note, 0)))
        if (LABEL_NUSES (XEXP (note, 0)) == 0)
          delete_related_insns (XEXP (note, 0));

  /* Back PREV up over deleted insns and notes to find the insn that
     actually precedes INSN in the surviving stream.  */
  while (prev && (INSN_DELETED_P (prev) || NOTE_P (prev)))
    prev = PREV_INSN (prev);

  /* If INSN was a label and a dispatch table follows it,
     delete the dispatch table.  The tablejump must have gone already.
     It isn't useful to fall through into a table.  */

  if (was_code_label
      && NEXT_INSN (insn) != 0
      && JUMP_TABLE_DATA_P (NEXT_INSN (insn)))
    next = delete_related_insns (NEXT_INSN (insn));

  /* If INSN was a label, delete insns following it if now unreachable.  */

  if (was_code_label && prev && BARRIER_P (prev))
    {
      enum rtx_code code;
      while (next)
        {
          code = GET_CODE (next);
          if (code == NOTE)
            next = NEXT_INSN (next);
          /* Keep going past other deleted labels to delete what follows.  */
          else if (code == CODE_LABEL && INSN_DELETED_P (next))
            next = NEXT_INSN (next);
          else if (code == BARRIER || INSN_P (next))
            /* Note: if this deletes a jump, it can cause more
               deletion of unreachable code, after a different label.
               As long as the value from this recursive call is correct,
               this invocation functions correctly.  */
            next = delete_related_insns (next);
          else
            break;
        }
    }

  /* I feel a little doubtful about this loop,
     but I see no clean and sure alternative way
     to find the first insn after INSN that is not now deleted.
     I hope this works.  */
  while (next && INSN_DELETED_P (next))
    next = NEXT_INSN (next);
  return next;
}
1293
 
1294
/* Delete a range of insns from FROM to TO, inclusive.
1295
   This is for the sake of peephole optimization, so assume
1296
   that whatever these insns do will still be done by a new
1297
   peephole insn that will replace them.  */
1298
 
1299
void
1300
delete_for_peephole (rtx from, rtx to)
1301
{
1302
  rtx insn = from;
1303
 
1304
  while (1)
1305
    {
1306
      rtx next = NEXT_INSN (insn);
1307
      rtx prev = PREV_INSN (insn);
1308
 
1309
      if (!NOTE_P (insn))
1310
        {
1311
          INSN_DELETED_P (insn) = 1;
1312
 
1313
          /* Patch this insn out of the chain.  */
1314
          /* We don't do this all at once, because we
1315
             must preserve all NOTEs.  */
1316
          if (prev)
1317
            NEXT_INSN (prev) = next;
1318
 
1319
          if (next)
1320
            PREV_INSN (next) = prev;
1321
        }
1322
 
1323
      if (insn == to)
1324
        break;
1325
      insn = next;
1326
    }
1327
 
1328
  /* Note that if TO is an unconditional jump
1329
     we *do not* delete the BARRIER that follows,
1330
     since the peephole that replaces this sequence
1331
     is also an unconditional jump in that case.  */
1332
}
1333
 
1334
/* Throughout LOC, redirect OLABEL to NLABEL.  Treat null OLABEL or
   NLABEL as a return.  Accrue modifications into the change group.

   Nothing is changed directly; every rewrite is queued with
   validate_change (..., 1) so the caller can apply or cancel the
   whole group atomically.  */

static void
redirect_exp_1 (rtx *loc, rtx olabel, rtx nlabel, rtx insn)
{
  rtx x = *loc;
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  if (code == LABEL_REF)
    {
      if (XEXP (x, 0) == olabel)
        {
          rtx n;
          /* Replace the whole LABEL_REF: either with a reference to
             NLABEL, or with RETURN when NLABEL is null.  */
          if (nlabel)
            n = gen_rtx_LABEL_REF (Pmode, nlabel);
          else
            n = gen_rtx_RETURN (VOIDmode);

          validate_change (insn, loc, n, 1);
          return;
        }
    }
  else if (code == RETURN && olabel == 0)
    {
      /* Null OLABEL means the jump is currently a RETURN; retarget it
         at NLABEL (or leave it a RETURN when NLABEL is also null).  */
      if (nlabel)
        x = gen_rtx_LABEL_REF (Pmode, nlabel);
      else
        x = gen_rtx_RETURN (VOIDmode);
      /* A bare RETURN at the top of the pattern must be wrapped in
         (set (pc) ...) to stay a valid jump pattern.  */
      if (loc == &PATTERN (insn))
        x = gen_rtx_SET (VOIDmode, pc_rtx, x);
      validate_change (insn, loc, x, 1);
      return;
    }

  /* Turning an unconditional jump to OLABEL into a RETURN replaces
     the entire SET, not just the LABEL_REF inside it.  */
  if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
      && GET_CODE (SET_SRC (x)) == LABEL_REF
      && XEXP (SET_SRC (x), 0) == olabel)
    {
      validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 1);
      return;
    }

  if (code == IF_THEN_ELSE)
    {
      /* Skip the condition of an IF_THEN_ELSE.  We only want to
         change jump destinations, not eventual label comparisons.  */
      redirect_exp_1 (&XEXP (x, 1), olabel, nlabel, insn);
      redirect_exp_1 (&XEXP (x, 2), olabel, nlabel, insn);
      return;
    }

  /* Generic recursive walk over all sub-expressions.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        redirect_exp_1 (&XEXP (x, i), olabel, nlabel, insn);
      else if (fmt[i] == 'E')
        {
          int j;
          for (j = 0; j < XVECLEN (x, i); j++)
            redirect_exp_1 (&XVECEXP (x, i, j), olabel, nlabel, insn);
        }
    }
}
1401
 
1402
/* Make JUMP go to NLABEL instead of where it jumps now.  Accrue
1403
   the modifications into the change group.  Return false if we did
1404
   not see how to do that.  */
1405
 
1406
int
1407
redirect_jump_1 (rtx jump, rtx nlabel)
1408
{
1409
  int ochanges = num_validated_changes ();
1410
  rtx *loc, asmop;
1411
 
1412
  asmop = extract_asm_operands (PATTERN (jump));
1413
  if (asmop)
1414
    {
1415
      if (nlabel == NULL)
1416
        return 0;
1417
      gcc_assert (ASM_OPERANDS_LABEL_LENGTH (asmop) == 1);
1418
      loc = &ASM_OPERANDS_LABEL (asmop, 0);
1419
    }
1420
  else if (GET_CODE (PATTERN (jump)) == PARALLEL)
1421
    loc = &XVECEXP (PATTERN (jump), 0, 0);
1422
  else
1423
    loc = &PATTERN (jump);
1424
 
1425
  redirect_exp_1 (loc, JUMP_LABEL (jump), nlabel, jump);
1426
  return num_validated_changes () > ochanges;
1427
}
1428
 
1429
/* Make JUMP go to NLABEL instead of where it jumps now.  If the old
1430
   jump target label is unused as a result, it and the code following
1431
   it may be deleted.
1432
 
1433
   If NLABEL is zero, we are to turn the jump into a (possibly conditional)
1434
   RETURN insn.
1435
 
1436
   The return value will be 1 if the change was made, 0 if it wasn't
1437
   (this can only occur for NLABEL == 0).  */
1438
 
1439
int
1440
redirect_jump (rtx jump, rtx nlabel, int delete_unused)
1441
{
1442
  rtx olabel = JUMP_LABEL (jump);
1443
 
1444
  if (nlabel == olabel)
1445
    return 1;
1446
 
1447
  if (! redirect_jump_1 (jump, nlabel) || ! apply_change_group ())
1448
    return 0;
1449
 
1450
  redirect_jump_2 (jump, olabel, nlabel, delete_unused, 0);
1451
  return 1;
1452
}
1453
 
1454
/* Fix up JUMP_LABEL and label ref counts after OLABEL has been replaced with
   NLABEL in JUMP.
   If DELETE_UNUSED is positive, delete related insn to OLABEL if its ref
   count has dropped to zero.
   If INVERT is nonzero, the jump's condition was inverted as well:
   any REG_EQUAL note is inverted to match and the branch probabilities
   are flipped.  */
void
redirect_jump_2 (rtx jump, rtx olabel, rtx nlabel, int delete_unused,
                 int invert)
{
  rtx note;

  gcc_assert (JUMP_LABEL (jump) == olabel);

  /* Negative DELETE_UNUSED used to be used to signalize behavior on
     moving FUNCTION_END note.  Just sanity check that no user still worry
     about this.  */
  gcc_assert (delete_unused >= 0);
  JUMP_LABEL (jump) = nlabel;
  /* Bump NLABEL's count before dropping OLABEL's below, so that when
     they are the same label its count never transiently hits zero.  */
  if (nlabel)
    ++LABEL_NUSES (nlabel);

  /* Update labels in any REG_EQUAL note.  */
  if ((note = find_reg_note (jump, REG_EQUAL, NULL_RTX)) != NULL_RTX)
    {
      /* Drop the note entirely if the jump became a RETURN, or if the
         note's condition cannot be inverted to match the new jump.  */
      if (!nlabel || (invert && !invert_exp_1 (XEXP (note, 0), jump)))
        remove_note (jump, note);
      else
        {
          redirect_exp_1 (&XEXP (note, 0), olabel, nlabel, jump);
          confirm_change_group ();
        }
    }

  if (olabel && --LABEL_NUSES (olabel) == 0 && delete_unused > 0
      /* Undefined labels will remain outside the insn stream.  */
      && INSN_UID (olabel))
    delete_related_insns (olabel);
  if (invert)
    invert_br_probabilities (jump);
}
1493
 
1494
/* Invert the jump condition X contained in jump insn INSN.  Accrue the
1495
   modifications into the change group.  Return nonzero for success.  */
1496
static int
1497
invert_exp_1 (rtx x, rtx insn)
1498
{
1499
  RTX_CODE code = GET_CODE (x);
1500
 
1501
  if (code == IF_THEN_ELSE)
1502
    {
1503
      rtx comp = XEXP (x, 0);
1504
      rtx tem;
1505
      enum rtx_code reversed_code;
1506
 
1507
      /* We can do this in two ways:  The preferable way, which can only
1508
         be done if this is not an integer comparison, is to reverse
1509
         the comparison code.  Otherwise, swap the THEN-part and ELSE-part
1510
         of the IF_THEN_ELSE.  If we can't do either, fail.  */
1511
 
1512
      reversed_code = reversed_comparison_code (comp, insn);
1513
 
1514
      if (reversed_code != UNKNOWN)
1515
        {
1516
          validate_change (insn, &XEXP (x, 0),
1517
                           gen_rtx_fmt_ee (reversed_code,
1518
                                           GET_MODE (comp), XEXP (comp, 0),
1519
                                           XEXP (comp, 1)),
1520
                           1);
1521
          return 1;
1522
        }
1523
 
1524
      tem = XEXP (x, 1);
1525
      validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
1526
      validate_change (insn, &XEXP (x, 2), tem, 1);
1527
      return 1;
1528
    }
1529
  else
1530
    return 0;
1531
}
1532
 
1533
/* Invert the condition of the jump JUMP, and make it jump to label
1534
   NLABEL instead of where it jumps now.  Accrue changes into the
1535
   change group.  Return false if we didn't see how to perform the
1536
   inversion and redirection.  */
1537
 
1538
int
1539
invert_jump_1 (rtx jump, rtx nlabel)
1540
{
1541
  rtx x = pc_set (jump);
1542
  int ochanges;
1543
  int ok;
1544
 
1545
  ochanges = num_validated_changes ();
1546
  if (x == NULL)
1547
    return 0;
1548
  ok = invert_exp_1 (SET_SRC (x), jump);
1549
  gcc_assert (ok);
1550
 
1551
  if (num_validated_changes () == ochanges)
1552
    return 0;
1553
 
1554
  /* redirect_jump_1 will fail of nlabel == olabel, and the current use is
1555
     in Pmode, so checking this is not merely an optimization.  */
1556
  return nlabel == JUMP_LABEL (jump) || redirect_jump_1 (jump, nlabel);
1557
}
1558
 
1559
/* Invert the condition of the jump JUMP, and make it jump to label
1560
   NLABEL instead of where it jumps now.  Return true if successful.  */
1561
 
1562
int
1563
invert_jump (rtx jump, rtx nlabel, int delete_unused)
1564
{
1565
  rtx olabel = JUMP_LABEL (jump);
1566
 
1567
  if (invert_jump_1 (jump, nlabel) && apply_change_group ())
1568
    {
1569
      redirect_jump_2 (jump, olabel, nlabel, delete_unused, 1);
1570
      return 1;
1571
    }
1572
  cancel_changes (0);
1573
  return 0;
1574
}
1575
 
1576
 
1577
/* Like rtx_equal_p except that it considers two REGs as equal
   if they renumber to the same value and considers two commutative
   operations to be the same if the order of the operands has been
   reversed.  */

int
rtx_renumbered_equal_p (const_rtx x, const_rtx y)
{
  int i;
  const enum rtx_code code = GET_CODE (x);
  const char *fmt;

  if (x == y)
    return 1;

  /* Registers (possibly wrapped in SUBREGs) are compared by the hard
     register they renumber to, folding SUBREG offsets into the
     register number where possible.  */
  if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
      && (REG_P (y) || (GET_CODE (y) == SUBREG
                                  && REG_P (SUBREG_REG (y)))))
    {
      int reg_x = -1, reg_y = -1;
      int byte_x = 0, byte_y = 0;
      struct subreg_info info;

      if (GET_MODE (x) != GET_MODE (y))
        return 0;

      /* If we haven't done any renumbering, don't
         make any assumptions.  */
      if (reg_renumber == 0)
        return rtx_equal_p (x, y);

      if (code == SUBREG)
        {
          reg_x = REGNO (SUBREG_REG (x));
          byte_x = SUBREG_BYTE (x);

          if (reg_renumber[reg_x] >= 0)
            {
              subreg_get_info (reg_renumber[reg_x],
                               GET_MODE (SUBREG_REG (x)), byte_x,
                               GET_MODE (x), &info);
              if (!info.representable_p)
                return 0;
              /* The SUBREG is representable as a plain hard register;
                 absorb the byte offset into the register number.  */
              reg_x = info.offset;
              byte_x = 0;
            }
        }
      else
        {
          reg_x = REGNO (x);
          if (reg_renumber[reg_x] >= 0)
            reg_x = reg_renumber[reg_x];
        }

      if (GET_CODE (y) == SUBREG)
        {
          reg_y = REGNO (SUBREG_REG (y));
          byte_y = SUBREG_BYTE (y);

          if (reg_renumber[reg_y] >= 0)
            {
              subreg_get_info (reg_renumber[reg_y],
                               GET_MODE (SUBREG_REG (y)), byte_y,
                               GET_MODE (y), &info);
              if (!info.representable_p)
                return 0;
              reg_y = info.offset;
              byte_y = 0;
            }
        }
      else
        {
          reg_y = REGNO (y);
          if (reg_renumber[reg_y] >= 0)
            reg_y = reg_renumber[reg_y];
        }

      return reg_x >= 0 && reg_x == reg_y && byte_x == byte_y;
    }

  /* Now we have disposed of all the cases
     in which different rtx codes can match.  */
  if (code != GET_CODE (y))
    return 0;

  switch (code)
    {
    case PC:
    case CC0:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case CONST_INT:
    case CONST_DOUBLE:
      /* These are only equal when pointer-identical, which the X == Y
         test above already covered.  */
      return 0;

    case LABEL_REF:
      /* We can't assume nonlocal labels have their following insns yet.  */
      if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
        return XEXP (x, 0) == XEXP (y, 0);

      /* Two label-refs are equivalent if they point at labels
         in the same position in the instruction stream.  */
      return (next_real_insn (XEXP (x, 0))
              == next_real_insn (XEXP (y, 0)));

    case SYMBOL_REF:
      /* Symbol names are shared, so pointer comparison suffices.  */
      return XSTR (x, 0) == XSTR (y, 0);

    case CODE_LABEL:
      /* If we didn't match EQ equality above, they aren't the same.  */
      return 0;

    default:
      break;
    }

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* MEMs referring to different address spaces are not equivalent.  */
  if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
    return 0;

  /* For commutative operations, the RTXs match if the operands match in
     either order.  Also handle the simple binary and unary cases without
     a loop.  */
  if (targetm.commutative_p (x, UNKNOWN))
    return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
             && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
            || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
                && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
  else if (NON_COMMUTATIVE_P (x))
    return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
            && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
  else if (UNARY_P (x))
    return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole things.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int j;
      switch (fmt[i])
        {
        case 'w':
          if (XWINT (x, i) != XWINT (y, i))
            return 0;
          break;

        case 'i':
          if (XINT (x, i) != XINT (y, i))
            return 0;
          break;

        case 't':
          if (XTREE (x, i) != XTREE (y, i))
            return 0;
          break;

        case 's':
          if (strcmp (XSTR (x, i), XSTR (y, i)))
            return 0;
          break;

        case 'e':
          if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
            return 0;
          break;

        case 'u':
          /* 'u' operands are shared rtx (e.g. insn references):
             pointer equality is the right test.  */
          if (XEXP (x, i) != XEXP (y, i))
            return 0;
          /* Fall through.  */
        case '0':
          break;

        case 'E':
          if (XVECLEN (x, i) != XVECLEN (y, i))
            return 0;
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
              return 0;
          break;

        default:
          gcc_unreachable ();
        }
    }
  return 1;
}
1770
 
1771
/* If X is a hard register or equivalent to one or a subregister of one,
1772
   return the hard register number.  If X is a pseudo register that was not
1773
   assigned a hard register, return the pseudo register number.  Otherwise,
1774
   return -1.  Any rtx is valid for X.  */
1775
 
1776
int
1777
true_regnum (const_rtx x)
1778
{
1779
  if (REG_P (x))
1780
    {
1781
      if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
1782
        return reg_renumber[REGNO (x)];
1783
      return REGNO (x);
1784
    }
1785
  if (GET_CODE (x) == SUBREG)
1786
    {
1787
      int base = true_regnum (SUBREG_REG (x));
1788
      if (base >= 0
1789
          && base < FIRST_PSEUDO_REGISTER)
1790
        {
1791
          struct subreg_info info;
1792
 
1793
          subreg_get_info (REGNO (SUBREG_REG (x)),
1794
                           GET_MODE (SUBREG_REG (x)),
1795
                           SUBREG_BYTE (x), GET_MODE (x), &info);
1796
 
1797
          if (info.representable_p)
1798
            return base + info.offset;
1799
        }
1800
    }
1801
  return -1;
1802
}
1803
 
1804
/* Return regno of the register REG and handle subregs too.  */
1805
unsigned int
1806
reg_or_subregno (const_rtx reg)
1807
{
1808
  if (GET_CODE (reg) == SUBREG)
1809
    reg = SUBREG_REG (reg);
1810
  gcc_assert (REG_P (reg));
1811
  return REGNO (reg);
1812
}

powered by: WebSVN 2.1.0

© copyright 1999-2024 OpenCores.org, equivalent to Oliscience, all rights reserved. OpenCores®, registered trademark.