OpenCores
URL https://opencores.org/ocsvn/scarts/scarts/trunk

Subversion Repositories scarts

[/] [scarts/] [trunk/] [toolchain/] [scarts-gcc/] [gcc-4.1.1/] [gcc/] [jump.c] - Blame information for rev 16

Go to most recent revision | Details | Compare with Previous | View Log

Line No. Rev Author Line
1 12 jlechner
/* Optimize jump instructions, for GNU compiler.
2
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997
3
   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005
4
   Free Software Foundation, Inc.
5
 
6
This file is part of GCC.
7
 
8
GCC is free software; you can redistribute it and/or modify it under
9
the terms of the GNU General Public License as published by the Free
10
Software Foundation; either version 2, or (at your option) any later
11
version.
12
 
13
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14
WARRANTY; without even the implied warranty of MERCHANTABILITY or
15
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
16
for more details.
17
 
18
You should have received a copy of the GNU General Public License
19
along with GCC; see the file COPYING.  If not, write to the Free
20
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
21
02110-1301, USA.  */
22
 
23
/* This is the pathetic reminder of old fame of the jump-optimization pass
24
   of the compiler.  Now it contains basically set of utility function to
25
   operate with jumps.
26
 
27
   Each CODE_LABEL has a count of the times it is used
28
   stored in the LABEL_NUSES internal field, and each JUMP_INSN
29
   has one label that it refers to stored in the
30
   JUMP_LABEL internal field.  With this we can detect labels that
31
   become unused because of the deletion of all the jumps that
32
   formerly used them.  The JUMP_LABEL info is sometimes looked
33
   at by later passes.
34
 
35
   The subroutines redirect_jump and invert_jump are used
36
   from other passes as well.  */
37
 
38
#include "config.h"
39
#include "system.h"
40
#include "coretypes.h"
41
#include "tm.h"
42
#include "rtl.h"
43
#include "tm_p.h"
44
#include "flags.h"
45
#include "hard-reg-set.h"
46
#include "regs.h"
47
#include "insn-config.h"
48
#include "insn-attr.h"
49
#include "recog.h"
50
#include "function.h"
51
#include "expr.h"
52
#include "real.h"
53
#include "except.h"
54
#include "diagnostic.h"
55
#include "toplev.h"
56
#include "reload.h"
57
#include "predict.h"
58
#include "timevar.h"
59
#include "tree-pass.h"
60
#include "target.h"
61
 
62
/* Optimize jump y; x: ... y: jumpif... x?
63
   Don't know if it is worth bothering with.  */
64
/* Optimize two cases of conditional jump to conditional jump?
65
   This can never delete any instruction or make anything dead,
66
   or even change what is live at any point.
67
   So perhaps let combiner do it.  */
68
 
69
static void init_label_info (rtx);
70
static void mark_all_labels (rtx);
71
static void delete_computation (rtx);
72
static void redirect_exp_1 (rtx *, rtx, rtx, rtx);
73
static int invert_exp_1 (rtx, rtx);
74
static int returnjump_p_1 (rtx *, void *);
75
static void delete_prior_computation (rtx, rtx);
76
 
77
/* Alternate entry into the jump optimizer.  This entry point only rebuilds
   the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
   instructions.  */
void
rebuild_jump_labels (rtx f)
{
  rtx insn;

  timevar_push (TV_REBUILD_JUMP);
  /* Reset LABEL_NUSES/JUMP_LABEL first, then recompute them by scanning
     every insn pattern starting at F.  */
  init_label_info (f);
  mark_all_labels (f);

  /* Keep track of labels used from static data; we don't track them
     closely enough to delete them here, so make sure their reference
     count doesn't drop to zero.  */

  for (insn = forced_labels; insn; insn = XEXP (insn, 1))
    if (LABEL_P (XEXP (insn, 0)))
      LABEL_NUSES (XEXP (insn, 0))++;
  timevar_pop (TV_REBUILD_JUMP);
}
98
 
99
/* Some old code expects exactly one BARRIER as the NEXT_INSN of a
100
   non-fallthru insn.  This is not generally true, as multiple barriers
101
   may have crept in, or the BARRIER may be separated from the last
102
   real insn by one or more NOTEs.
103
 
104
   This simple pass moves barriers and removes duplicates so that the
105
   old code is happy.
106
 */
107
void
108
cleanup_barriers (void)
109
{
110
  rtx insn, next, prev;
111
  for (insn = get_insns (); insn; insn = next)
112
    {
113
      next = NEXT_INSN (insn);
114
      if (BARRIER_P (insn))
115
        {
116
          prev = prev_nonnote_insn (insn);
117
          if (BARRIER_P (prev))
118
            delete_insn (insn);
119
          else if (prev != PREV_INSN (insn))
120
            reorder_insns (insn, insn, prev);
121
        }
122
    }
123
}
124
 
125
/* Pass descriptor registering cleanup_barriers with the pass manager.  */
struct tree_opt_pass pass_cleanup_barriers =
{
  "barriers",                           /* name */
  NULL,                                 /* gate */
  cleanup_barriers,                     /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */

};
141
 
142
/* Remove line-number notes that merely repeat the immediately preceding
   note for the same source location.  */
void
purge_line_number_notes (void)
{
  rtx last_note = 0;   /* Most recent line note still considered live.  */
  rtx insn;
  /* Delete extraneous line number notes.
     Note that two consecutive notes for different lines are not really
     extraneous.  There should be some indication where that line belonged,
     even if it became empty.  */

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (NOTE_P (insn))
      {
        if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
          /* Any previous line note was for the prologue; gdb wants a new
             note after the prologue even if it is for the same line.  */
          last_note = NULL_RTX;
        else if (NOTE_LINE_NUMBER (insn) >= 0)
          {
            /* Delete this note if it is identical to previous note.  */
            if (last_note
#ifdef USE_MAPPED_LOCATION
                && NOTE_SOURCE_LOCATION (insn) == NOTE_SOURCE_LOCATION (last_note)
#else
                && NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last_note)
                && NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last_note)
#endif
)
              {
                delete_related_insns (insn);
                continue;
              }

            last_note = insn;
          }
      }
}
179
 
180
/* Pass descriptor registering purge_line_number_notes with the pass
   manager.  */
struct tree_opt_pass pass_purge_lineno_notes =
{
  "elnotes",                            /* name */
  NULL,                                 /* gate */
  purge_line_number_notes,              /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */

};
196
 
197
 
198
/* Initialize LABEL_NUSES and JUMP_LABEL fields.  Delete any REG_LABEL
   notes whose labels don't occur in the insn any more.  */
static void
init_label_info (rtx f)
{
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    if (LABEL_P (insn))
      /* A preserved label keeps one artificial use so it is never
         considered dead.  */
      LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
    else if (JUMP_P (insn))
      JUMP_LABEL (insn) = 0;
    else if (NONJUMP_INSN_P (insn) || CALL_P (insn))
      {
        rtx note, next;

        /* Grab NEXT before remove_note can unlink the current note.  */
        for (note = REG_NOTES (insn); note; note = next)
          {
            next = XEXP (note, 1);
            if (REG_NOTE_KIND (note) == REG_LABEL
                && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
              remove_note (insn, note);
          }
      }
}
224
 
225
/* Mark the label each jump jumps to.
   Combine consecutive labels, and count uses of labels.  */

static void
mark_all_labels (rtx f)
{
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
        mark_jump_label (PATTERN (insn), insn, 0);
        if (! INSN_DELETED_P (insn) && JUMP_P (insn))
          {
            /* When we know the LABEL_REF contained in a REG used in
               an indirect jump, we'll have a REG_LABEL note so that
               flow can tell where it's going.  */
            if (JUMP_LABEL (insn) == 0)
              {
                rtx label_note = find_reg_note (insn, REG_LABEL, NULL_RTX);
                if (label_note)
                  {
                    /* Put a LABEL_REF around the REG_LABEL note, so
                       that we can canonicalize it.  */
                    rtx label_ref = gen_rtx_LABEL_REF (Pmode,
                                                       XEXP (label_note, 0));

                    mark_jump_label (label_ref, insn, 0);
                    /* Copy the canonicalized label back into the note
                       and record it as this jump's target.  */
                    XEXP (label_note, 0) = XEXP (label_ref, 0);
                    JUMP_LABEL (insn) = XEXP (label_note, 0);
                  }
              }
          }
      }
}
260
 
261
/* Move all block-beg, block-end, loop-beg, loop-cont, loop-vtop, loop-end,
   notes between START and END out before START.  START and END may be such
   notes.  Returns the values of the new starting and ending insns, which
   may be different if the original ones were such notes.
   Return true if there were only such notes and no real instructions.  */

bool
squeeze_notes (rtx* startp, rtx* endp)
{
  rtx start = *startp;
  rtx end = *endp;

  rtx insn;
  rtx next;
  rtx last = NULL;               /* Last non-note insn seen in the range.  */
  rtx past_end = NEXT_INSN (end);

  for (insn = start; insn != past_end; insn = next)
    {
      next = NEXT_INSN (insn);
      if (NOTE_P (insn)
          && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
              || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
              || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
              || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END))
        {
          /* BLOCK_BEG or BLOCK_END notes only exist in the `final' pass.  */
          gcc_assert (NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_BEG
                      && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_END);

          if (insn == start)
            start = next;
          else
            {
              /* Unlink INSN from its current position and splice it into
                 the chain immediately before START.  Statement order here
                 is significant: the unlink uses PREV only after the new
                 links around START have been established.  */
              rtx prev = PREV_INSN (insn);
              PREV_INSN (insn) = PREV_INSN (start);
              NEXT_INSN (insn) = start;
              NEXT_INSN (PREV_INSN (insn)) = insn;
              PREV_INSN (NEXT_INSN (insn)) = insn;
              NEXT_INSN (prev) = next;
              PREV_INSN (next) = prev;
            }
        }
      else
        last = insn;
    }

  /* There were no real instructions.  */
  if (start == past_end)
    return true;

  end = last;

  *startp = start;
  *endp = end;
  return false;
}
318
 
319
/* Return the label before INSN, or put a new label there.  */
320
 
321
rtx
322
get_label_before (rtx insn)
323
{
324
  rtx label;
325
 
326
  /* Find an existing label at this point
327
     or make a new one if there is none.  */
328
  label = prev_nonnote_insn (insn);
329
 
330
  if (label == 0 || !LABEL_P (label))
331
    {
332
      rtx prev = PREV_INSN (insn);
333
 
334
      label = gen_label_rtx ();
335
      emit_label_after (label, prev);
336
      LABEL_NUSES (label) = 0;
337
    }
338
  return label;
339
}
340
 
341
/* Return the label after INSN, or put a new label there.  */
342
 
343
rtx
344
get_label_after (rtx insn)
345
{
346
  rtx label;
347
 
348
  /* Find an existing label at this point
349
     or make a new one if there is none.  */
350
  label = next_nonnote_insn (insn);
351
 
352
  if (label == 0 || !LABEL_P (label))
353
    {
354
      label = gen_label_rtx ();
355
      emit_label_after (label, insn);
356
      LABEL_NUSES (label) = 0;
357
    }
358
  return label;
359
}
360
 
361
/* Given a comparison (CODE ARG0 ARG1), inside an insn, INSN, return a code
   of reversed comparison if it is possible to do so.  Otherwise return UNKNOWN.
   UNKNOWN may be returned in case we are having CC_MODE compare and we don't
   know whether its source is floating point or integer comparison.  Machine
   description should define REVERSIBLE_CC_MODE and REVERSE_CONDITION macros
   to help this function avoid overhead in these cases.  */
enum rtx_code
reversed_comparison_code_parts (enum rtx_code code, rtx arg0, rtx arg1, rtx insn)
{
  enum machine_mode mode;

  /* If this is not actually a comparison, we can't reverse it.  */
  if (GET_RTX_CLASS (code) != RTX_COMPARE
      && GET_RTX_CLASS (code) != RTX_COMM_COMPARE)
    return UNKNOWN;

  mode = GET_MODE (arg0);
  if (mode == VOIDmode)
    mode = GET_MODE (arg1);

  /* First see if machine description supplies us way to reverse the
     comparison.  Give it priority over everything else to allow
     machine description to do tricks.  */
  if (GET_MODE_CLASS (mode) == MODE_CC
      && REVERSIBLE_CC_MODE (mode))
    {
#ifdef REVERSE_CONDITION
      return REVERSE_CONDITION (code, mode);
#endif
      return reverse_condition (code);
    }

  /* Try a few special cases based on the comparison code.  */
  switch (code)
    {
    case GEU:
    case GTU:
    case LEU:
    case LTU:
    case NE:
    case EQ:
      /* It is always safe to reverse EQ and NE, even for the floating
         point.  Similarly the unsigned comparisons are never used for
         floating point so we can reverse them in the default way.  */
      return reverse_condition (code);
    case ORDERED:
    case UNORDERED:
    case LTGT:
    case UNEQ:
      /* In case we already see unordered comparison, we can be sure to
         be dealing with floating point so we don't need any more tests.  */
      return reverse_condition_maybe_unordered (code);
    case UNLT:
    case UNLE:
    case UNGT:
    case UNGE:
      /* We don't have safe way to reverse these yet.  */
      return UNKNOWN;
    default:
      break;
    }

  if (GET_MODE_CLASS (mode) == MODE_CC || CC0_P (arg0))
    {
      rtx prev;
      /* Try to search for the comparison to determine the real mode.
         This code is expensive, but with sane machine description it
         will be never used, since REVERSIBLE_CC_MODE will return true
         in all cases.  */
      if (! insn)
        return UNKNOWN;

      /* Walk backwards within the basic block looking for the insn that
         set the flags register ARG0.  */
      for (prev = prev_nonnote_insn (insn);
           prev != 0 && !LABEL_P (prev);
           prev = prev_nonnote_insn (prev))
        {
          rtx set = set_of (arg0, prev);
          if (set && GET_CODE (set) == SET
              && rtx_equal_p (SET_DEST (set), arg0))
            {
              rtx src = SET_SRC (set);

              if (GET_CODE (src) == COMPARE)
                {
                  rtx comparison = src;
                  arg0 = XEXP (src, 0);
                  mode = GET_MODE (arg0);
                  if (mode == VOIDmode)
                    mode = GET_MODE (XEXP (comparison, 1));
                  break;
                }
              /* We can get past reg-reg moves.  This may be useful for model
                 of i387 comparisons that first move flag registers around.  */
              if (REG_P (src))
                {
                  arg0 = src;
                  continue;
                }
            }
          /* If register is clobbered in some ununderstandable way,
             give up.  */
          if (set)
            return UNKNOWN;
        }
    }

  /* Test for an integer condition, or a floating-point comparison
     in which NaNs can be ignored.  */
  if (GET_CODE (arg0) == CONST_INT
      || (GET_MODE (arg0) != VOIDmode
          && GET_MODE_CLASS (mode) != MODE_CC
          && !HONOR_NANS (mode)))
    return reverse_condition (code);

  return UNKNOWN;
}
477
 
478
/* A wrapper around the previous function to take COMPARISON as rtx
479
   expression.  This simplifies many callers.  */
480
enum rtx_code
481
reversed_comparison_code (rtx comparison, rtx insn)
482
{
483
  if (!COMPARISON_P (comparison))
484
    return UNKNOWN;
485
  return reversed_comparison_code_parts (GET_CODE (comparison),
486
                                         XEXP (comparison, 0),
487
                                         XEXP (comparison, 1), insn);
488
}
489
 
490
/* Return comparison with reversed code of EXP.
491
   Return NULL_RTX in case we fail to do the reversal.  */
492
rtx
493
reversed_comparison (rtx exp, enum machine_mode mode)
494
{
495
  enum rtx_code reversed_code = reversed_comparison_code (exp, NULL_RTX);
496
  if (reversed_code == UNKNOWN)
497
    return NULL_RTX;
498
  else
499
    return simplify_gen_relational (reversed_code, mode, VOIDmode,
500
                                    XEXP (exp, 0), XEXP (exp, 1));
501
}
502
 
503
 
504
/* Given an rtx-code for a comparison, return the code for the negated
   comparison.  If no such code exists, return UNKNOWN.

   WATCH OUT!  reverse_condition is not safe to use on a jump that might
   be acting on the results of an IEEE floating point comparison, because
   of the special treatment of non-signaling nans in comparisons.
   Use reversed_comparison_code instead.  */

enum rtx_code
reverse_condition (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
      return NE;
    case NE:
      return EQ;
    case GT:
      return LE;
    case GE:
      return LT;
    case LT:
      return GE;
    case LE:
      return GT;
    case GTU:
      return LEU;
    case GEU:
      return LTU;
    case LTU:
      return GEU;
    case LEU:
      return GTU;
    case UNORDERED:
      return ORDERED;
    case ORDERED:
      return UNORDERED;

    /* The negation of an unordered comparison would need ORDERED-and-...,
       which is not a single rtx code, so these cannot be reversed here.  */
    case UNLT:
    case UNLE:
    case UNGT:
    case UNGE:
    case UNEQ:
    case LTGT:
      return UNKNOWN;

    default:
      gcc_unreachable ();
    }
}
554
 
555
/* Similar, but we're allowed to generate unordered comparisons, which
   makes it safe for IEEE floating-point.  Of course, we have to recognize
   that the target will support them too...  */

enum rtx_code
reverse_condition_maybe_unordered (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
      return NE;
    case NE:
      return EQ;
    /* An ordered comparison reverses to its unordered complement, and
       vice versa, so NaN operands make the reversed test succeed.  */
    case GT:
      return UNLE;
    case GE:
      return UNLT;
    case LT:
      return UNGE;
    case LE:
      return UNGT;
    case LTGT:
      return UNEQ;
    case UNORDERED:
      return ORDERED;
    case ORDERED:
      return UNORDERED;
    case UNLT:
      return GE;
    case UNLE:
      return GT;
    case UNGT:
      return LE;
    case UNGE:
      return LT;
    case UNEQ:
      return LTGT;

    default:
      gcc_unreachable ();
    }
}
597
 
598
/* Similar, but return the code when two operands of a comparison are swapped.
   This IS safe for IEEE floating-point.  */

enum rtx_code
swap_condition (enum rtx_code code)
{
  switch (code)
    {
    /* Symmetric predicates are unchanged by swapping the operands.  */
    case EQ:
    case NE:
    case UNORDERED:
    case ORDERED:
    case UNEQ:
    case LTGT:
      return code;

    case GT:
      return LT;
    case GE:
      return LE;
    case LT:
      return GT;
    case LE:
      return GE;
    case GTU:
      return LTU;
    case GEU:
      return LEU;
    case LTU:
      return GTU;
    case LEU:
      return GEU;
    case UNLT:
      return UNGT;
    case UNLE:
      return UNGE;
    case UNGT:
      return UNLT;
    case UNGE:
      return UNLE;

    default:
      gcc_unreachable ();
    }
}
643
 
644
/* Given a comparison CODE, return the corresponding unsigned comparison.
   If CODE is an equality comparison or already an unsigned comparison,
   CODE is returned.  */

enum rtx_code
unsigned_condition (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
    case NE:
    case GTU:
    case GEU:
    case LTU:
    case LEU:
      return code;

    case GT:
      return GTU;
    case GE:
      return GEU;
    case LT:
      return LTU;
    case LE:
      return LEU;

    /* Floating-point and unordered codes have no unsigned counterpart.  */
    default:
      gcc_unreachable ();
    }
}
674
 
675
/* Similarly, return the signed version of a comparison.  */

enum rtx_code
signed_condition (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
    case NE:
    case GT:
    case GE:
    case LT:
    case LE:
      return code;

    case GTU:
      return GT;
    case GEU:
      return GE;
    case LTU:
      return LT;
    case LEU:
      return LE;

    /* Floating-point and unordered codes have no signed counterpart.  */
    default:
      gcc_unreachable ();
    }
}
703
 
704
/* Return nonzero if CODE1 is more strict than CODE2, i.e., if the
   truth of CODE1 implies the truth of CODE2.  */

int
comparison_dominates_p (enum rtx_code code1, enum rtx_code code2)
{
  /* UNKNOWN comparison codes can happen as a result of trying to revert
     comparison codes.
     They can't match anything, so we have to reject them here.  */
  if (code1 == UNKNOWN || code2 == UNKNOWN)
    return 0;

  /* Trivially, every code implies itself.  */
  if (code1 == code2)
    return 1;

  switch (code1)
    {
    case UNEQ:
      if (code2 == UNLE || code2 == UNGE)
        return 1;
      break;

    case EQ:
      if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
          || code2 == ORDERED)
        return 1;
      break;

    case UNLT:
      if (code2 == UNLE || code2 == NE)
        return 1;
      break;

    case LT:
      if (code2 == LE || code2 == NE || code2 == ORDERED || code2 == LTGT)
        return 1;
      break;

    case UNGT:
      if (code2 == UNGE || code2 == NE)
        return 1;
      break;

    case GT:
      if (code2 == GE || code2 == NE || code2 == ORDERED || code2 == LTGT)
        return 1;
      break;

    case GE:
    case LE:
      if (code2 == ORDERED)
        return 1;
      break;

    case LTGT:
      if (code2 == NE || code2 == ORDERED)
        return 1;
      break;

    case LTU:
      if (code2 == LEU || code2 == NE)
        return 1;
      break;

    case GTU:
      if (code2 == GEU || code2 == NE)
        return 1;
      break;

    case UNORDERED:
      if (code2 == NE || code2 == UNEQ || code2 == UNLE || code2 == UNLT
          || code2 == UNGE || code2 == UNGT)
        return 1;
      break;

    default:
      break;
    }

  return 0;
}
785
 
786
/* Return 1 if INSN is an unconditional jump and nothing else.  */
787
 
788
int
789
simplejump_p (rtx insn)
790
{
791
  return (JUMP_P (insn)
792
          && GET_CODE (PATTERN (insn)) == SET
793
          && GET_CODE (SET_DEST (PATTERN (insn))) == PC
794
          && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
795
}
796
 
797
/* Return nonzero if INSN is a (possibly) conditional jump
798
   and nothing more.
799
 
800
   Use of this function is deprecated, since we need to support combined
801
   branch and compare insns.  Use any_condjump_p instead whenever possible.  */
802
 
803
int
804
condjump_p (rtx insn)
805
{
806
  rtx x = PATTERN (insn);
807
 
808
  if (GET_CODE (x) != SET
809
      || GET_CODE (SET_DEST (x)) != PC)
810
    return 0;
811
 
812
  x = SET_SRC (x);
813
  if (GET_CODE (x) == LABEL_REF)
814
    return 1;
815
  else
816
    return (GET_CODE (x) == IF_THEN_ELSE
817
            && ((GET_CODE (XEXP (x, 2)) == PC
818
                 && (GET_CODE (XEXP (x, 1)) == LABEL_REF
819
                     || GET_CODE (XEXP (x, 1)) == RETURN))
820
                || (GET_CODE (XEXP (x, 1)) == PC
821
                    && (GET_CODE (XEXP (x, 2)) == LABEL_REF
822
                        || GET_CODE (XEXP (x, 2)) == RETURN))));
823
}
824
 
825
/* Return nonzero if INSN is a (possibly) conditional jump inside a
   PARALLEL.

   Use this function is deprecated, since we need to support combined
   branch and compare insns.  Use any_condjump_p instead whenever possible.  */

int
condjump_in_parallel_p (rtx insn)
{
  rtx x = PATTERN (insn);

  if (GET_CODE (x) != PARALLEL)
    return 0;
  else
    /* Only the first element of the PARALLEL may be the jump proper.  */
    x = XVECEXP (x, 0, 0);

  if (GET_CODE (x) != SET)
    return 0;
  if (GET_CODE (SET_DEST (x)) != PC)
    return 0;
  if (GET_CODE (SET_SRC (x)) == LABEL_REF)
    return 1;
  if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
    return 0;
  /* One arm falls through (PC); the other is a label or a return.  */
  if (XEXP (SET_SRC (x), 2) == pc_rtx
      && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
          || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
    return 1;
  if (XEXP (SET_SRC (x), 1) == pc_rtx
      && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
          || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
    return 1;
  return 0;
}
859
 
860
/* Return set of PC, otherwise NULL.  */
861
 
862
rtx
863
pc_set (rtx insn)
864
{
865
  rtx pat;
866
  if (!JUMP_P (insn))
867
    return NULL_RTX;
868
  pat = PATTERN (insn);
869
 
870
  /* The set is allowed to appear either as the insn pattern or
871
     the first set in a PARALLEL.  */
872
  if (GET_CODE (pat) == PARALLEL)
873
    pat = XVECEXP (pat, 0, 0);
874
  if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == PC)
875
    return pat;
876
 
877
  return NULL_RTX;
878
}
879
 
880
/* Return true when insn is an unconditional direct jump,
881
   possibly bundled inside a PARALLEL.  */
882
 
883
int
884
any_uncondjump_p (rtx insn)
885
{
886
  rtx x = pc_set (insn);
887
  if (!x)
888
    return 0;
889
  if (GET_CODE (SET_SRC (x)) != LABEL_REF)
890
    return 0;
891
  if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
892
    return 0;
893
  return 1;
894
}
895
 
896
/* Return true when insn is a conditional jump.  This function works for
897
   instructions containing PC sets in PARALLELs.  The instruction may have
898
   various other effects so before removing the jump you must verify
899
   onlyjump_p.
900
 
901
   Note that unlike condjump_p it returns false for unconditional jumps.  */
902
 
903
int
904
any_condjump_p (rtx insn)
905
{
906
  rtx x = pc_set (insn);
907
  enum rtx_code a, b;
908
 
909
  if (!x)
910
    return 0;
911
  if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
912
    return 0;
913
 
914
  a = GET_CODE (XEXP (SET_SRC (x), 1));
915
  b = GET_CODE (XEXP (SET_SRC (x), 2));
916
 
917
  return ((b == PC && (a == LABEL_REF || a == RETURN))
918
          || (a == PC && (b == LABEL_REF || b == RETURN)));
919
}
920
 
921
/* Return the label of a conditional jump.  */
922
 
923
rtx
924
condjump_label (rtx insn)
925
{
926
  rtx x = pc_set (insn);
927
 
928
  if (!x)
929
    return NULL_RTX;
930
  x = SET_SRC (x);
931
  if (GET_CODE (x) == LABEL_REF)
932
    return x;
933
  if (GET_CODE (x) != IF_THEN_ELSE)
934
    return NULL_RTX;
935
  if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
936
    return XEXP (x, 1);
937
  if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
938
    return XEXP (x, 2);
939
  return NULL_RTX;
940
}
941
 
942
/* Return true if INSN is a (possibly conditional) return insn.
   Callback for for_each_rtx: *LOC is the subexpression being visited.  */

static int
returnjump_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
{
  rtx x = *loc;

  return x && (GET_CODE (x) == RETURN
               || (GET_CODE (x) == SET && SET_IS_RETURN_P (x)));
}
952
 
953
int
954
returnjump_p (rtx insn)
955
{
956
  if (!JUMP_P (insn))
957
    return 0;
958
  return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
959
}
960
 
961
/* Return true if INSN is a jump that only transfers control and
962
   nothing more.  */
963
 
964
int
965
onlyjump_p (rtx insn)
966
{
967
  rtx set;
968
 
969
  if (!JUMP_P (insn))
970
    return 0;
971
 
972
  set = single_set (insn);
973
  if (set == NULL)
974
    return 0;
975
  if (GET_CODE (SET_DEST (set)) != PC)
976
    return 0;
977
  if (side_effects_p (SET_SRC (set)))
978
    return 0;
979
 
980
  return 1;
981
}
982
 
983
#ifdef HAVE_cc0
984
 
985
/* Return nonzero if X is an RTX that only sets the condition codes
986
   and has no side effects.  */
987
 
988
int
989
only_sets_cc0_p (rtx x)
990
{
991
  if (! x)
992
    return 0;
993
 
994
  if (INSN_P (x))
995
    x = PATTERN (x);
996
 
997
  return sets_cc0_p (x) == 1 && ! side_effects_p (x);
998
}
999
 
1000
/* Return 1 if X is an RTX that does nothing but set the condition codes
   and CLOBBER or USE registers.
   Return -1 if X does explicitly set the condition codes,
   but also does other things.  */

int
sets_cc0_p (rtx x)
{
  if (! x)
    return 0;

  /* Accept either an insn or a bare pattern.  */
  if (INSN_P (x))
    x = PATTERN (x);

  if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
    return 1;
  if (GET_CODE (x) == PARALLEL)
    {
      int i;
      int sets_cc0 = 0;      /* Saw a set of cc0.  */
      int other_things = 0;  /* Saw a SET of something other than cc0.  */
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
        {
          if (GET_CODE (XVECEXP (x, 0, i)) == SET
              && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
            sets_cc0 = 1;
          else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
            other_things = 1;
        }
      return ! sets_cc0 ? 0 : other_things ? -1 : 1;
    }
  return 0;
}
1033
#endif
1034
 
1035
/* Follow any unconditional jump at LABEL;
   return the ultimate label reached by any such chain of jumps.
   Return null if the chain ultimately leads to a return instruction.
   If LABEL is not followed by a jump, return LABEL.
   If the chain loops or we can't find end, return LABEL,
   since that tells caller to avoid changing the insn.

   If RELOAD_COMPLETED is 0, we do not chain across a NOTE_INSN_LOOP_BEG or
   a USE or CLOBBER.  */

rtx
follow_jumps (rtx label)
{
  rtx insn;
  rtx next;
  rtx value = label;
  int depth;

  /* Chase at most 10 jumps: this bounds the work and, together with the
     explicit cycle test below, guards against infinite chains.  The loop
     condition requires an active simple unconditional jump (or RETURN)
     followed immediately by a BARRIER before each hop is taken.  */
  for (depth = 0;
       (depth < 10
        && (insn = next_active_insn (value)) != 0
        && JUMP_P (insn)
        && ((JUMP_LABEL (insn) != 0 && any_uncondjump_p (insn)
             && onlyjump_p (insn))
            || GET_CODE (PATTERN (insn)) == RETURN)
        && (next = NEXT_INSN (insn))
        && BARRIER_P (next));
       depth++)
    {
      /* Don't chain through the insn that jumps into a loop
         from outside the loop,
         since that would create multiple loop entry jumps
         and prevent loop optimization.  */
      rtx tem;
      if (!reload_completed)
        for (tem = value; tem != insn; tem = NEXT_INSN (tem))
          if (NOTE_P (tem)
              && (NOTE_LINE_NUMBER (tem) == NOTE_INSN_LOOP_BEG
                  /* ??? Optional.  Disables some optimizations, but makes
                     gcov output more accurate with -O.  */
                  || (flag_test_coverage && NOTE_LINE_NUMBER (tem) > 0)))
            return value;

      /* If we have found a cycle, make the insn jump to itself.  */
      if (JUMP_LABEL (insn) == label)
        return label;

      /* Stop short of a dispatch table: redirecting into an ADDR_VEC or
         ADDR_DIFF_VEC would not be useful.  */
      tem = next_active_insn (JUMP_LABEL (insn));
      if (tem && (GET_CODE (PATTERN (tem)) == ADDR_VEC
                  || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
        break;

      value = JUMP_LABEL (insn);
    }
  /* Hitting the depth limit means we couldn't find the end; tell the
     caller to leave the insn alone.  */
  if (depth == 10)
    return label;
  return value;
}
1093
 
1094
 
1095
/* Find all CODE_LABELs referred to in X, and increment their use counts.
   If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
   in INSN, then store one of them in JUMP_LABEL (INSN).
   If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
   referenced in INSN, add a REG_LABEL note containing that label to INSN.
   Also, when there are consecutive labels, canonicalize on the last of them.

   Note that two labels separated by a loop-beginning note
   must be kept distinct if we have not yet done loop-optimization,
   because the gap between them is where loop-optimize
   will want to move invariant code to.  CROSS_JUMP tells us
   that loop-optimization is done with.  */

void
mark_jump_label (rtx x, rtx insn, int in_mem)
{
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  switch (code)
    {
    /* None of these codes can contain a label reference.  */
    case PC:
    case CC0:
    case REG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CLOBBER:
    case CALL:
      return;

    case MEM:
      /* Record that we are inside a MEM, so a SYMBOL_REF below may be a
         constant-pool reference; then fall through to the generic walk.  */
      in_mem = 1;
      break;

    case SYMBOL_REF:
      if (!in_mem)
        return;

      /* If this is a constant-pool reference, see if it is a label.  */
      if (CONSTANT_POOL_ADDRESS_P (x))
        mark_jump_label (get_pool_constant (x), insn, in_mem);
      break;

    case LABEL_REF:
      {
        rtx label = XEXP (x, 0);

        /* Ignore remaining references to unreachable labels that
           have been deleted.  */
        if (NOTE_P (label)
            && NOTE_LINE_NUMBER (label) == NOTE_INSN_DELETED_LABEL)
          break;

        gcc_assert (LABEL_P (label));

        /* Ignore references to labels of containing functions.  */
        if (LABEL_REF_NONLOCAL_P (x))
          break;

        XEXP (x, 0) = label;
        /* Count this use, unless the referencing insn is itself deleted.  */
        if (! insn || ! INSN_DELETED_P (insn))
          ++LABEL_NUSES (label);

        if (insn)
          {
            if (JUMP_P (insn))
              JUMP_LABEL (insn) = label;
            else
              {
                /* Add a REG_LABEL note for LABEL unless there already
                   is one.  All uses of a label, except for labels
                   that are the targets of jumps, must have a
                   REG_LABEL note.  */
                if (! find_reg_note (insn, REG_LABEL, label))
                  REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, label,
                                                        REG_NOTES (insn));
              }
          }
        return;
      }

  /* Do walk the labels in a vector, but not the first operand of an
     ADDR_DIFF_VEC.  Don't set the JUMP_LABEL of a vector.  */
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      if (! INSN_DELETED_P (insn))
        {
          int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;

          for (i = 0; i < XVECLEN (x, eltnum); i++)
            mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX, in_mem);
        }
      return;

    default:
      break;
    }

  /* Generic recursion: walk every sub-expression ('e') and every
     sub-vector ('E') of X.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        mark_jump_label (XEXP (x, i), insn, in_mem);
      else if (fmt[i] == 'E')
        {
          int j;
          for (j = 0; j < XVECLEN (x, i); j++)
            mark_jump_label (XVECEXP (x, i, j), insn, in_mem);
        }
    }
}
1207
 
1208
/* If all INSN does is set the pc, delete it,
1209
   and delete the insn that set the condition codes for it
1210
   if that's what the previous thing was.  */
1211
 
1212
void
1213
delete_jump (rtx insn)
1214
{
1215
  rtx set = single_set (insn);
1216
 
1217
  if (set && GET_CODE (SET_DEST (set)) == PC)
1218
    delete_computation (insn);
1219
}
1220
 
1221
/* Recursively delete prior insns that compute the value (used only by INSN
   which the caller is deleting) stored in the register mentioned by NOTE
   which is a REG_DEAD note associated with INSN.  */

static void
delete_prior_computation (rtx note, rtx insn)
{
  rtx our_prev;
  rtx reg = XEXP (note, 0);

  /* Scan backwards over ordinary insns and calls, looking for the insn
     that sets REG.  Stop at anything else (labels, jumps, ...).  */
  for (our_prev = prev_nonnote_insn (insn);
       our_prev && (NONJUMP_INSN_P (our_prev)
                    || CALL_P (our_prev));
       our_prev = prev_nonnote_insn (our_prev))
    {
      rtx pat = PATTERN (our_prev);

      /* If we reach a CALL which is not calling a const function
         or the callee pops the arguments, then give up.  */
      if (CALL_P (our_prev)
          && (! CONST_OR_PURE_CALL_P (our_prev)
              || GET_CODE (pat) != SET || GET_CODE (SET_SRC (pat)) != CALL))
        break;

      /* If we reach a SEQUENCE, it is too complex to try to
         do anything with it, so give up.  We can be run during
         and after reorg, so SEQUENCE rtl can legitimately show
         up here.  */
      if (GET_CODE (pat) == SEQUENCE)
        break;

      if (GET_CODE (pat) == USE
          && NONJUMP_INSN_P (XEXP (pat, 0)))
        /* reorg creates USEs that look like this.  We leave them
           alone because reorg needs them for its own purposes.  */
        break;

      if (reg_set_p (reg, pat))
        {
          /* A side-effecting non-call setter cannot be removed.  */
          if (side_effects_p (pat) && !CALL_P (our_prev))
            break;

          if (GET_CODE (pat) == PARALLEL)
            {
              /* If we find a SET of something else, we can't
                 delete the insn.  */

              int i;

              for (i = 0; i < XVECLEN (pat, 0); i++)
                {
                  rtx part = XVECEXP (pat, 0, i);

                  if (GET_CODE (part) == SET
                      && SET_DEST (part) != reg)
                    break;
                }

              /* Every SET targeted REG, so the whole insn is dead.  */
              if (i == XVECLEN (pat, 0))
                delete_computation (our_prev);
            }
          else if (GET_CODE (pat) == SET
                   && REG_P (SET_DEST (pat)))
            {
              /* Compare the hard-register ranges [regno, endregno) of the
                 dying register and [dest_regno, dest_endregno) of the
                 destination; pseudos count as a single register.  */
              int dest_regno = REGNO (SET_DEST (pat));
              int dest_endregno
                = (dest_regno
                   + (dest_regno < FIRST_PSEUDO_REGISTER
                      ? hard_regno_nregs[dest_regno]
                                        [GET_MODE (SET_DEST (pat))] : 1));
              int regno = REGNO (reg);
              int endregno
                = (regno
                   + (regno < FIRST_PSEUDO_REGISTER
                      ? hard_regno_nregs[regno][GET_MODE (reg)] : 1));

              /* The setter writes only registers that die here: dead.  */
              if (dest_regno >= regno
                  && dest_endregno <= endregno)
                delete_computation (our_prev);

              /* We may have a multi-word hard register and some, but not
                 all, of the words of the register are needed in subsequent
                 insns.  Write REG_UNUSED notes for those parts that were not
                 needed.  */
              else if (dest_regno <= regno
                       && dest_endregno >= endregno)
                {
                  int i;

                  REG_NOTES (our_prev)
                    = gen_rtx_EXPR_LIST (REG_UNUSED, reg,
                                         REG_NOTES (our_prev));

                  for (i = dest_regno; i < dest_endregno; i++)
                    if (! find_regno_note (our_prev, REG_UNUSED, i))
                      break;

                  /* Only when every word is marked unused is the whole
                     setter dead.  */
                  if (i == dest_endregno)
                    delete_computation (our_prev);
                }
            }

          break;
        }

      /* If PAT references the register that dies here, it is an
         additional use.  Hence any prior SET isn't dead.  However, this
         insn becomes the new place for the REG_DEAD note.  */
      if (reg_overlap_mentioned_p (reg, pat))
        {
          XEXP (note, 1) = REG_NOTES (our_prev);
          REG_NOTES (our_prev) = note;
          break;
        }
    }
}
1337
 
1338
/* Delete INSN and recursively delete insns that compute values used only
   by INSN.  This uses the REG_DEAD notes computed during flow analysis.
   If we are running before flow.c, we need do nothing since flow.c will
   delete dead code.  We also can't know if the registers being used are
   dead or not at this point.

   Otherwise, look at all our REG_DEAD notes.  If a previous insn does
   nothing other than set a register that dies in this insn, we can delete
   that insn as well.

   On machines with CC0, if CC0 is used in this insn, we may be able to
   delete the insn that set it.  */

static void
delete_computation (rtx insn)
{
  rtx note, next;

#ifdef HAVE_cc0
  if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
    {
      rtx prev = prev_nonnote_insn (insn);
      /* We assume that at this stage
         CC's are always set explicitly
         and always immediately before the jump that
         will use them.  So if the previous insn
         exists to set the CC's, delete it
         (unless it performs auto-increments, etc.).  */
      if (prev && NONJUMP_INSN_P (prev)
          && sets_cc0_p (PATTERN (prev)))
        {
          /* sets_cc0_p > 0 means the insn does nothing but set cc0.  */
          if (sets_cc0_p (PATTERN (prev)) > 0
              && ! side_effects_p (PATTERN (prev)))
            delete_computation (prev);
          else
            /* Otherwise, show that cc0 won't be used.  */
            REG_NOTES (prev) = gen_rtx_EXPR_LIST (REG_UNUSED,
                                                  cc0_rtx, REG_NOTES (prev));
        }
    }
#endif

  /* Walk the REG_DEAD notes; NEXT is fetched first because
     delete_prior_computation may splice NOTE onto another insn.  */
  for (note = REG_NOTES (insn); note; note = next)
    {
      next = XEXP (note, 1);

      if (REG_NOTE_KIND (note) != REG_DEAD
          /* Verify that the REG_NOTE is legitimate.  */
          || !REG_P (XEXP (note, 0)))
        continue;

      delete_prior_computation (note, insn);
    }

  /* Finally remove INSN itself (and anything made unreachable).  */
  delete_related_insns (insn);
}
1394
 
1395
/* Delete insn INSN from the chain of insns and update label ref counts
   and delete insns now unreachable.

   Returns the first insn after INSN that was not deleted.

   Usage of this instruction is deprecated.  Use delete_insn instead and
   subsequent cfg_cleanup pass to delete unreachable code if needed.  */

rtx
delete_related_insns (rtx insn)
{
  int was_code_label = (LABEL_P (insn));
  rtx note;
  rtx next = NEXT_INSN (insn), prev = PREV_INSN (insn);

  /* Skip over any insns already deleted by earlier recursion.  */
  while (next && INSN_DELETED_P (next))
    next = NEXT_INSN (next);

  /* This insn is already deleted => return first following nondeleted.  */
  if (INSN_DELETED_P (insn))
    return next;

  delete_insn (insn);

  /* If instruction is followed by a barrier,
     delete the barrier too.  */

  if (next != 0 && BARRIER_P (next))
    delete_insn (next);

  /* If deleting a jump, decrement the count of the label,
     and delete the label if it is now unused.  */

  if (JUMP_P (insn) && JUMP_LABEL (insn))
    {
      rtx lab = JUMP_LABEL (insn), lab_next;

      if (LABEL_NUSES (lab) == 0)
        {
          /* This can delete NEXT or PREV,
             either directly if NEXT is JUMP_LABEL (INSN),
             or indirectly through more levels of jumps.  */
          delete_related_insns (lab);

          /* I feel a little doubtful about this loop,
             but I see no clean and sure alternative way
             to find the first insn after INSN that is not now deleted.
             I hope this works.  */
          while (next && INSN_DELETED_P (next))
            next = NEXT_INSN (next);
          return next;
        }
      else if (tablejump_p (insn, NULL, &lab_next))
        {
          /* If we're deleting the tablejump, delete the dispatch table.
             We may not be able to kill the label immediately preceding
             just yet, as it might be referenced in code leading up to
             the tablejump.  */
          delete_related_insns (lab_next);
        }
    }

  /* Likewise if we're deleting a dispatch table.  */

  if (JUMP_P (insn)
      && (GET_CODE (PATTERN (insn)) == ADDR_VEC
          || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
    {
      rtx pat = PATTERN (insn);
      /* For an ADDR_DIFF_VEC the label vector is operand 1, else 0.  */
      int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
      int len = XVECLEN (pat, diff_vec_p);

      for (i = 0; i < len; i++)
        if (LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
          delete_related_insns (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
      while (next && INSN_DELETED_P (next))
        next = NEXT_INSN (next);
      return next;
    }

  /* Likewise for an ordinary INSN / CALL_INSN with a REG_LABEL note.  */
  if (NONJUMP_INSN_P (insn) || CALL_P (insn))
    for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
      if (REG_NOTE_KIND (note) == REG_LABEL
          /* This could also be a NOTE_INSN_DELETED_LABEL note.  */
          && LABEL_P (XEXP (note, 0)))
        if (LABEL_NUSES (XEXP (note, 0)) == 0)
          delete_related_insns (XEXP (note, 0));

  /* Find the real (non-deleted, non-note) predecessor, to test below
     whether INSN's label was preceded by a barrier.  */
  while (prev && (INSN_DELETED_P (prev) || NOTE_P (prev)))
    prev = PREV_INSN (prev);

  /* If INSN was a label and a dispatch table follows it,
     delete the dispatch table.  The tablejump must have gone already.
     It isn't useful to fall through into a table.  */

  if (was_code_label
      && NEXT_INSN (insn) != 0
      && JUMP_P (NEXT_INSN (insn))
      && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
          || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
    next = delete_related_insns (NEXT_INSN (insn));

  /* If INSN was a label, delete insns following it if now unreachable.  */

  if (was_code_label && prev && BARRIER_P (prev))
    {
      enum rtx_code code;
      while (next)
        {
          code = GET_CODE (next);
          if (code == NOTE
              && NOTE_LINE_NUMBER (next) != NOTE_INSN_FUNCTION_END)
            next = NEXT_INSN (next);
          /* Keep going past other deleted labels to delete what follows.  */
          else if (code == CODE_LABEL && INSN_DELETED_P (next))
            next = NEXT_INSN (next);
          else if (code == BARRIER || INSN_P (next))
            /* Note: if this deletes a jump, it can cause more
               deletion of unreachable code, after a different label.
               As long as the value from this recursive call is correct,
               this invocation functions correctly.  */
            next = delete_related_insns (next);
          else
            break;
        }
    }

  return next;
}
1525
 
1526
/* Delete a range of insns from FROM to TO, inclusive.
1527
   This is for the sake of peephole optimization, so assume
1528
   that whatever these insns do will still be done by a new
1529
   peephole insn that will replace them.  */
1530
 
1531
void
1532
delete_for_peephole (rtx from, rtx to)
1533
{
1534
  rtx insn = from;
1535
 
1536
  while (1)
1537
    {
1538
      rtx next = NEXT_INSN (insn);
1539
      rtx prev = PREV_INSN (insn);
1540
 
1541
      if (!NOTE_P (insn))
1542
        {
1543
          INSN_DELETED_P (insn) = 1;
1544
 
1545
          /* Patch this insn out of the chain.  */
1546
          /* We don't do this all at once, because we
1547
             must preserve all NOTEs.  */
1548
          if (prev)
1549
            NEXT_INSN (prev) = next;
1550
 
1551
          if (next)
1552
            PREV_INSN (next) = prev;
1553
        }
1554
 
1555
      if (insn == to)
1556
        break;
1557
      insn = next;
1558
    }
1559
 
1560
  /* Note that if TO is an unconditional jump
1561
     we *do not* delete the BARRIER that follows,
1562
     since the peephole that replaces this sequence
1563
     is also an unconditional jump in that case.  */
1564
}
1565
 
1566
/* Throughout LOC, redirect OLABEL to NLABEL.  Treat null OLABEL or
   NLABEL as a return.  Accrue modifications into the change group.  */

static void
redirect_exp_1 (rtx *loc, rtx olabel, rtx nlabel, rtx insn)
{
  rtx x = *loc;
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  if (code == LABEL_REF)
    {
      if (XEXP (x, 0) == olabel)
        {
          rtx n;
          /* A null NLABEL means redirect to a RETURN instead of a label.  */
          if (nlabel)
            n = gen_rtx_LABEL_REF (Pmode, nlabel);
          else
            n = gen_rtx_RETURN (VOIDmode);

          /* Queue the replacement; it is committed by apply_change_group.  */
          validate_change (insn, loc, n, 1);
          return;
        }
    }
  else if (code == RETURN && olabel == 0)
    {
      /* Redirect a RETURN to NLABEL (or to a fresh RETURN).  */
      if (nlabel)
        x = gen_rtx_LABEL_REF (Pmode, nlabel);
      else
        x = gen_rtx_RETURN (VOIDmode);
      /* At the top level of the pattern we must wrap it in a SET of pc.  */
      if (loc == &PATTERN (insn))
        x = gen_rtx_SET (VOIDmode, pc_rtx, x);
      validate_change (insn, loc, x, 1);
      return;
    }

  /* (set (pc) (label_ref olabel)) with null NLABEL collapses to a
     plain RETURN pattern.  */
  if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
      && GET_CODE (SET_SRC (x)) == LABEL_REF
      && XEXP (SET_SRC (x), 0) == olabel)
    {
      validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 1);
      return;
    }

  /* Recurse into every sub-expression and sub-vector.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        redirect_exp_1 (&XEXP (x, i), olabel, nlabel, insn);
      else if (fmt[i] == 'E')
        {
          int j;
          for (j = 0; j < XVECLEN (x, i); j++)
            redirect_exp_1 (&XVECEXP (x, i, j), olabel, nlabel, insn);
        }
    }
}
1624
 
1625
/* Make JUMP go to NLABEL instead of where it jumps now.  Accrue
1626
   the modifications into the change group.  Return false if we did
1627
   not see how to do that.  */
1628
 
1629
int
1630
redirect_jump_1 (rtx jump, rtx nlabel)
1631
{
1632
  int ochanges = num_validated_changes ();
1633
  rtx *loc;
1634
 
1635
  if (GET_CODE (PATTERN (jump)) == PARALLEL)
1636
    loc = &XVECEXP (PATTERN (jump), 0, 0);
1637
  else
1638
    loc = &PATTERN (jump);
1639
 
1640
  redirect_exp_1 (loc, JUMP_LABEL (jump), nlabel, jump);
1641
  return num_validated_changes () > ochanges;
1642
}
1643
 
1644
/* Make JUMP go to NLABEL instead of where it jumps now.  If the old
1645
   jump target label is unused as a result, it and the code following
1646
   it may be deleted.
1647
 
1648
   If NLABEL is zero, we are to turn the jump into a (possibly conditional)
1649
   RETURN insn.
1650
 
1651
   The return value will be 1 if the change was made, 0 if it wasn't
1652
   (this can only occur for NLABEL == 0).  */
1653
 
1654
int
1655
redirect_jump (rtx jump, rtx nlabel, int delete_unused)
1656
{
1657
  rtx olabel = JUMP_LABEL (jump);
1658
 
1659
  if (nlabel == olabel)
1660
    return 1;
1661
 
1662
  if (! redirect_jump_1 (jump, nlabel) || ! apply_change_group ())
1663
    return 0;
1664
 
1665
  redirect_jump_2 (jump, olabel, nlabel, delete_unused, 0);
1666
  return 1;
1667
}
1668
 
1669
/* Fix up JUMP_LABEL and label ref counts after OLABEL has been replaced with
   NLABEL in JUMP.  If DELETE_UNUSED is non-negative, copy a
   NOTE_INSN_FUNCTION_END found after OLABEL to the place after NLABEL.
   If DELETE_UNUSED is positive, delete related insn to OLABEL if its ref
   count has dropped to zero.  */
void
redirect_jump_2 (rtx jump, rtx olabel, rtx nlabel, int delete_unused,
                 int invert)
{
  rtx note;

  /* Point JUMP at its new target and count the new reference.  */
  JUMP_LABEL (jump) = nlabel;
  if (nlabel)
    ++LABEL_NUSES (nlabel);

  /* Update labels in any REG_EQUAL note.  */
  if ((note = find_reg_note (jump, REG_EQUAL, NULL_RTX)) != NULL_RTX)
    {
      /* Drop the note entirely when turning the jump into a RETURN, or
         when an inverted condition cannot be expressed in the note.  */
      if (!nlabel || (invert && !invert_exp_1 (XEXP (note, 0), jump)))
        remove_note (jump, note);
      else
        {
          redirect_exp_1 (&XEXP (note, 0), olabel, nlabel, jump);
          confirm_change_group ();
        }
    }

  /* If we're eliding the jump over exception cleanups at the end of a
     function, move the function end note so that -Wreturn-type works.  */
  if (olabel && nlabel
      && NEXT_INSN (olabel)
      && NOTE_P (NEXT_INSN (olabel))
      && NOTE_LINE_NUMBER (NEXT_INSN (olabel)) == NOTE_INSN_FUNCTION_END
      && delete_unused >= 0)
    emit_note_after (NOTE_INSN_FUNCTION_END, nlabel);

  /* Drop the old label's reference and, if requested, delete it when it
     becomes unused.  */
  if (olabel && --LABEL_NUSES (olabel) == 0 && delete_unused > 0
      /* Undefined labels will remain outside the insn stream.  */
      && INSN_UID (olabel))
    delete_related_insns (olabel);
  /* An inverted branch needs its taken/not-taken probabilities swapped.  */
  if (invert)
    invert_br_probabilities (jump);
}
1712
 
1713
/* Invert the jump condition X contained in jump insn INSN.  Accrue the
1714
   modifications into the change group.  Return nonzero for success.  */
1715
static int
1716
invert_exp_1 (rtx x, rtx insn)
1717
{
1718
  RTX_CODE code = GET_CODE (x);
1719
 
1720
  if (code == IF_THEN_ELSE)
1721
    {
1722
      rtx comp = XEXP (x, 0);
1723
      rtx tem;
1724
      enum rtx_code reversed_code;
1725
 
1726
      /* We can do this in two ways:  The preferable way, which can only
1727
         be done if this is not an integer comparison, is to reverse
1728
         the comparison code.  Otherwise, swap the THEN-part and ELSE-part
1729
         of the IF_THEN_ELSE.  If we can't do either, fail.  */
1730
 
1731
      reversed_code = reversed_comparison_code (comp, insn);
1732
 
1733
      if (reversed_code != UNKNOWN)
1734
        {
1735
          validate_change (insn, &XEXP (x, 0),
1736
                           gen_rtx_fmt_ee (reversed_code,
1737
                                           GET_MODE (comp), XEXP (comp, 0),
1738
                                           XEXP (comp, 1)),
1739
                           1);
1740
          return 1;
1741
        }
1742
 
1743
      tem = XEXP (x, 1);
1744
      validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
1745
      validate_change (insn, &XEXP (x, 2), tem, 1);
1746
      return 1;
1747
    }
1748
  else
1749
    return 0;
1750
}
1751
 
1752
/* Invert the condition of the jump JUMP, and make it jump to label
1753
   NLABEL instead of where it jumps now.  Accrue changes into the
1754
   change group.  Return false if we didn't see how to perform the
1755
   inversion and redirection.  */
1756
 
1757
int
1758
invert_jump_1 (rtx jump, rtx nlabel)
1759
{
1760
  rtx x = pc_set (jump);
1761
  int ochanges;
1762
  int ok;
1763
 
1764
  ochanges = num_validated_changes ();
1765
  gcc_assert (x);
1766
  ok = invert_exp_1 (SET_SRC (x), jump);
1767
  gcc_assert (ok);
1768
 
1769
  if (num_validated_changes () == ochanges)
1770
    return 0;
1771
 
1772
  /* redirect_jump_1 will fail of nlabel == olabel, and the current use is
1773
     in Pmode, so checking this is not merely an optimization.  */
1774
  return nlabel == JUMP_LABEL (jump) || redirect_jump_1 (jump, nlabel);
1775
}
1776
 
1777
/* Invert the condition of the jump JUMP, and make it jump to label
1778
   NLABEL instead of where it jumps now.  Return true if successful.  */
1779
 
1780
int
1781
invert_jump (rtx jump, rtx nlabel, int delete_unused)
1782
{
1783
  rtx olabel = JUMP_LABEL (jump);
1784
 
1785
  if (invert_jump_1 (jump, nlabel) && apply_change_group ())
1786
    {
1787
      redirect_jump_2 (jump, olabel, nlabel, delete_unused, 1);
1788
      return 1;
1789
    }
1790
  cancel_changes (0);
1791
  return 0;
1792
}
1793
 
1794
 
1795
/* Like rtx_equal_p except that it considers two REGs as equal
   if they renumber to the same value and considers two commutative
   operations to be the same if the order of the operands has been
   reversed.  */

int
rtx_renumbered_equal_p (rtx x, rtx y)
{
  int i;
  enum rtx_code code = GET_CODE (x);
  const char *fmt;

  if (x == y)
    return 1;

  /* Registers and subregs of registers are compared by the hard register
     numbers they renumber to, folding SUBREG offsets into the regno.  */
  if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
      && (REG_P (y) || (GET_CODE (y) == SUBREG
                                  && REG_P (SUBREG_REG (y)))))
    {
      int reg_x = -1, reg_y = -1;
      int byte_x = 0, byte_y = 0;

      if (GET_MODE (x) != GET_MODE (y))
        return 0;

      /* If we haven't done any renumbering, don't
         make any assumptions.  */
      if (reg_renumber == 0)
        return rtx_equal_p (x, y);

      if (code == SUBREG)
        {
          reg_x = REGNO (SUBREG_REG (x));
          byte_x = SUBREG_BYTE (x);

          /* Renumbered: fold the subreg byte offset into the hard regno.  */
          if (reg_renumber[reg_x] >= 0)
            {
              reg_x = subreg_regno_offset (reg_renumber[reg_x],
                                           GET_MODE (SUBREG_REG (x)),
                                           byte_x,
                                           GET_MODE (x));
              byte_x = 0;
            }
        }
      else
        {
          reg_x = REGNO (x);
          if (reg_renumber[reg_x] >= 0)
            reg_x = reg_renumber[reg_x];
        }

      if (GET_CODE (y) == SUBREG)
        {
          reg_y = REGNO (SUBREG_REG (y));
          byte_y = SUBREG_BYTE (y);

          if (reg_renumber[reg_y] >= 0)
            {
              reg_y = subreg_regno_offset (reg_renumber[reg_y],
                                           GET_MODE (SUBREG_REG (y)),
                                           byte_y,
                                           GET_MODE (y));
              byte_y = 0;
            }
        }
      else
        {
          reg_y = REGNO (y);
          if (reg_renumber[reg_y] >= 0)
            reg_y = reg_renumber[reg_y];
        }

      return reg_x >= 0 && reg_x == reg_y && byte_x == byte_y;
    }

  /* Now we have disposed of all the cases
     in which different rtx codes can match.  */
  if (code != GET_CODE (y))
    return 0;

  switch (code)
    {
    /* These compared unequal above (x != y), so they cannot match.  */
    case PC:
    case CC0:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case CONST_INT:
    case CONST_DOUBLE:
      return 0;

    case LABEL_REF:
      /* We can't assume nonlocal labels have their following insns yet.  */
      if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
        return XEXP (x, 0) == XEXP (y, 0);

      /* Two label-refs are equivalent if they point at labels
         in the same position in the instruction stream.  */
      return (next_real_insn (XEXP (x, 0))
              == next_real_insn (XEXP (y, 0)));

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    case CODE_LABEL:
      /* If we didn't match EQ equality above, they aren't the same.  */
      return 0;

    default:
      break;
    }

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* For commutative operations, the RTX match if the operand match in any
     order.  Also handle the simple binary and unary cases without a loop.  */
  if (targetm.commutative_p (x, UNKNOWN))
    return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
             && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
            || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
                && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
  else if (NON_COMMUTATIVE_P (x))
    return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
            && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
  else if (UNARY_P (x))
    return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole things.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int j;
      switch (fmt[i])
        {
        case 'w':
          if (XWINT (x, i) != XWINT (y, i))
            return 0;
          break;

        case 'i':
          if (XINT (x, i) != XINT (y, i))
            return 0;
          break;

        case 't':
          if (XTREE (x, i) != XTREE (y, i))
            return 0;
          break;

        case 's':
          if (strcmp (XSTR (x, i), XSTR (y, i)))
            return 0;
          break;

        case 'e':
          if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
            return 0;
          break;

        case 'u':
          /* rtx references must be identical objects.  */
          if (XEXP (x, i) != XEXP (y, i))
            return 0;
          /* Fall through.  */
        case '0':
          break;

        case 'E':
          if (XVECLEN (x, i) != XVECLEN (y, i))
            return 0;
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
              return 0;
          break;

        default:
          gcc_unreachable ();
        }
    }
  return 1;
}
1979
 
1980
/* If X is a hard register or equivalent to one or a subregister of one,
1981
   return the hard register number.  If X is a pseudo register that was not
1982
   assigned a hard register, return the pseudo register number.  Otherwise,
1983
   return -1.  Any rtx is valid for X.  */
1984
 
1985
int
1986
true_regnum (rtx x)
1987
{
1988
  if (REG_P (x))
1989
    {
1990
      if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
1991
        return reg_renumber[REGNO (x)];
1992
      return REGNO (x);
1993
    }
1994
  if (GET_CODE (x) == SUBREG)
1995
    {
1996
      int base = true_regnum (SUBREG_REG (x));
1997
      if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
1998
        return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
1999
                                           GET_MODE (SUBREG_REG (x)),
2000
                                           SUBREG_BYTE (x), GET_MODE (x));
2001
    }
2002
  return -1;
2003
}
2004
 
2005
/* Return regno of the register REG and handle subregs too.  */
2006
unsigned int
2007
reg_or_subregno (rtx reg)
2008
{
2009
  if (GET_CODE (reg) == SUBREG)
2010
    reg = SUBREG_REG (reg);
2011
  gcc_assert (REG_P (reg));
2012
  return REGNO (reg);
2013
}

powered by: WebSVN 2.1.0

© copyright 1999-2024 OpenCores.org, equivalent to Oliscience, all rights reserved. OpenCores®, registered trademark.