OpenCores
URL https://opencores.org/ocsvn/openrisc_2011-10-31/openrisc_2011-10-31/trunk

Subversion Repositories openrisc_2011-10-31

[/] [openrisc/] [trunk/] [gnu-src/] [gcc-4.2.2/] [gcc/] [jump.c] - Blame information for rev 634

Go to most recent revision | Details | Compare with Previous | View Log

Line No. Rev Author Line
1 38 julius
/* Optimize jump instructions, for GNU compiler.
2
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997
3
   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2007
4
   Free Software Foundation, Inc.
5
 
6
This file is part of GCC.
7
 
8
GCC is free software; you can redistribute it and/or modify it under
9
the terms of the GNU General Public License as published by the Free
10
Software Foundation; either version 3, or (at your option) any later
11
version.
12
 
13
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14
WARRANTY; without even the implied warranty of MERCHANTABILITY or
15
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
16
for more details.
17
 
18
You should have received a copy of the GNU General Public License
19
along with GCC; see the file COPYING3.  If not see
20
<http://www.gnu.org/licenses/>.  */
21
 
22
/* This is the pathetic reminder of old fame of the jump-optimization pass
23
   of the compiler.  Now it contains basically a set of utility functions to
24
   operate with jumps.
25
 
26
   Each CODE_LABEL has a count of the times it is used
27
   stored in the LABEL_NUSES internal field, and each JUMP_INSN
28
   has one label that it refers to stored in the
29
   JUMP_LABEL internal field.  With this we can detect labels that
30
   become unused because of the deletion of all the jumps that
31
   formerly used them.  The JUMP_LABEL info is sometimes looked
32
   at by later passes.
33
 
34
   The subroutines redirect_jump and invert_jump are used
35
   from other passes as well.  */
36
 
37
#include "config.h"
38
#include "system.h"
39
#include "coretypes.h"
40
#include "tm.h"
41
#include "rtl.h"
42
#include "tm_p.h"
43
#include "flags.h"
44
#include "hard-reg-set.h"
45
#include "regs.h"
46
#include "insn-config.h"
47
#include "insn-attr.h"
48
#include "recog.h"
49
#include "function.h"
50
#include "expr.h"
51
#include "real.h"
52
#include "except.h"
53
#include "diagnostic.h"
54
#include "toplev.h"
55
#include "reload.h"
56
#include "predict.h"
57
#include "timevar.h"
58
#include "tree-pass.h"
59
#include "target.h"
60
 
61
/* Optimize jump y; x: ... y: jumpif... x?
62
   Don't know if it is worth bothering with.  */
63
/* Optimize two cases of conditional jump to conditional jump?
64
   This can never delete any instruction or make anything dead,
65
   or even change what is live at any point.
66
   So perhaps let combiner do it.  */
67
 
68
static void init_label_info (rtx);
69
static void mark_all_labels (rtx);
70
static void delete_computation (rtx);
71
static void redirect_exp_1 (rtx *, rtx, rtx, rtx);
72
static int invert_exp_1 (rtx, rtx);
73
static int returnjump_p_1 (rtx *, void *);
74
static void delete_prior_computation (rtx, rtx);
75
 
76
/* Alternate entry into the jump optimizer.  This entry point only rebuilds
   the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
   instructions.  F is the first insn of the current function.  */
void
rebuild_jump_labels (rtx f)
{
  rtx insn;

  timevar_push (TV_REBUILD_JUMP);
  init_label_info (f);
  mark_all_labels (f);

  /* Keep track of labels used from static data; we don't track them
     closely enough to delete them here, so make sure their reference
     count doesn't drop to zero.  */

  /* forced_labels is walked as an rtx list: XEXP (node, 0) is the label,
     XEXP (node, 1) the next list node.  */
  for (insn = forced_labels; insn; insn = XEXP (insn, 1))
    if (LABEL_P (XEXP (insn, 0)))
      LABEL_NUSES (XEXP (insn, 0))++;
  timevar_pop (TV_REBUILD_JUMP);
}
97
 
98
/* Some old code expects exactly one BARRIER as the NEXT_INSN of a
99
   non-fallthru insn.  This is not generally true, as multiple barriers
100
   may have crept in, or the BARRIER may be separated from the last
101
   real insn by one or more NOTEs.
102
 
103
   This simple pass moves barriers and removes duplicates so that the
104
   old code is happy.
105
 */
106
unsigned int
107
cleanup_barriers (void)
108
{
109
  rtx insn, next, prev;
110
  for (insn = get_insns (); insn; insn = next)
111
    {
112
      next = NEXT_INSN (insn);
113
      if (BARRIER_P (insn))
114
        {
115
          prev = prev_nonnote_insn (insn);
116
          if (BARRIER_P (prev))
117
            delete_insn (insn);
118
          else if (prev != PREV_INSN (insn))
119
            reorder_insns (insn, insn, prev);
120
        }
121
    }
122
  return 0;
123
}
124
 
125
/* Pass descriptor for the barrier-cleanup pass above.  */
struct tree_opt_pass pass_cleanup_barriers =
{
  "barriers",                           /* name */
  NULL,                                 /* gate */
  cleanup_barriers,                     /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */

};
141
 
142
/* Remove line number notes that duplicate the previous one, walking the
   whole insn stream.  Always returns 0 (pass execute convention).  */
unsigned int
purge_line_number_notes (void)
{
  rtx last_note = 0;   /* Most recent surviving line note, or 0.  */
  rtx insn;
  /* Delete extraneous line number notes.
     Note that two consecutive notes for different lines are not really
     extraneous.  There should be some indication where that line belonged,
     even if it became empty.  */

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (NOTE_P (insn))
      {
        if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
          /* Any previous line note was for the prologue; gdb wants a new
             note after the prologue even if it is for the same line.  */
          last_note = NULL_RTX;
        else if (NOTE_LINE_NUMBER (insn) >= 0)
          {
            /* Delete this note if it is identical to previous note.
               "Identical" depends on the location representation in use.  */
            if (last_note
#ifdef USE_MAPPED_LOCATION
                && NOTE_SOURCE_LOCATION (insn) == NOTE_SOURCE_LOCATION (last_note)
#else
                && NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last_note)
                && NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last_note)
#endif
)
              {
                delete_related_insns (insn);
                continue;
              }

            last_note = insn;
          }
      }
  return 0;
}
180
 
181
/* Pass descriptor for the line-number-note purging pass above.  */
struct tree_opt_pass pass_purge_lineno_notes =
{
  "elnotes",                            /* name */
  NULL,                                 /* gate */
  purge_line_number_notes,              /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */

};
197
 
198
 
199
/* Initialize LABEL_NUSES and JUMP_LABEL fields.  Delete any REG_LABEL
200
   notes whose labels don't occur in the insn any more.  Returns the
201
   largest INSN_UID found.  */
202
static void
203
init_label_info (rtx f)
204
{
205
  rtx insn;
206
 
207
  for (insn = f; insn; insn = NEXT_INSN (insn))
208
    if (LABEL_P (insn))
209
      LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
210
    else if (JUMP_P (insn))
211
      JUMP_LABEL (insn) = 0;
212
    else if (NONJUMP_INSN_P (insn) || CALL_P (insn))
213
      {
214
        rtx note, next;
215
 
216
        for (note = REG_NOTES (insn); note; note = next)
217
          {
218
            next = XEXP (note, 1);
219
            if (REG_NOTE_KIND (note) == REG_LABEL
220
                && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
221
              remove_note (insn, note);
222
          }
223
      }
224
}
225
 
226
/* Mark the label each jump jumps to.
   Combine consecutive labels, and count uses of labels.  */

static void
mark_all_labels (rtx f)
{
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
        mark_jump_label (PATTERN (insn), insn, 0);
        /* mark_jump_label may have deleted the insn (e.g. a jump to the
           immediately following label); only finish up live jumps.  */
        if (! INSN_DELETED_P (insn) && JUMP_P (insn))
          {
            /* When we know the LABEL_REF contained in a REG used in
               an indirect jump, we'll have a REG_LABEL note so that
               flow can tell where it's going.  */
            if (JUMP_LABEL (insn) == 0)
              {
                rtx label_note = find_reg_note (insn, REG_LABEL, NULL_RTX);
                if (label_note)
                  {
                    /* Put a LABEL_REF around the REG_LABEL note's label, so
                       that mark_jump_label can canonicalize it.  */
                    rtx label_ref = gen_rtx_LABEL_REF (Pmode,
                                                       XEXP (label_note, 0));

                    mark_jump_label (label_ref, insn, 0);
                    /* Copy the (possibly canonicalized) label back into
                       the note and record it as the jump's target.  */
                    XEXP (label_note, 0) = XEXP (label_ref, 0);
                    JUMP_LABEL (insn) = XEXP (label_note, 0);
                  }
              }
          }
      }
}
261
 
262
/* Move all block-beg, block-end and loop-beg notes between START and END out
   before START.  START and END may be such notes.  Returns the values of the
   new starting and ending insns, which may be different if the original ones
   were such notes.  Return true if there were only such notes and no real
   instructions.  */

bool
squeeze_notes (rtx* startp, rtx* endp)
{
  rtx start = *startp;
  rtx end = *endp;

  rtx insn;
  rtx next;
  rtx last = NULL;                 /* Last non-moved insn seen so far.  */
  rtx past_end = NEXT_INSN (end);  /* Loop sentinel: one past END.  */

  for (insn = start; insn != past_end; insn = next)
    {
      next = NEXT_INSN (insn);
      if (NOTE_P (insn)
          && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
              || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG))
        {
          /* BLOCK_BEG or BLOCK_END notes only exist in the `final' pass.
             Seeing one here means the stream is corrupt — the assert below
             repeats the branch condition negated, i.e. this branch is an
             unreachability check.  */
          gcc_assert (NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_BEG
                      && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_END);

          if (insn == start)
            start = next;
          else
            {
              /* Unlink INSN from its current position and splice it back
                 in immediately before START (after any notes already
                 moved), leaving START itself in place.  */
              rtx prev = PREV_INSN (insn);
              PREV_INSN (insn) = PREV_INSN (start);
              NEXT_INSN (insn) = start;
              NEXT_INSN (PREV_INSN (insn)) = insn;
              PREV_INSN (NEXT_INSN (insn)) = insn;
              NEXT_INSN (prev) = next;
              PREV_INSN (next) = prev;
            }
        }
      else
        last = insn;
    }

  /* There were no real instructions.  */
  if (start == past_end)
    return true;

  end = last;

  *startp = start;
  *endp = end;
  return false;
}
317
 
318
/* Return the label before INSN, or put a new label there.  */
319
 
320
rtx
321
get_label_before (rtx insn)
322
{
323
  rtx label;
324
 
325
  /* Find an existing label at this point
326
     or make a new one if there is none.  */
327
  label = prev_nonnote_insn (insn);
328
 
329
  if (label == 0 || !LABEL_P (label))
330
    {
331
      rtx prev = PREV_INSN (insn);
332
 
333
      label = gen_label_rtx ();
334
      emit_label_after (label, prev);
335
      LABEL_NUSES (label) = 0;
336
    }
337
  return label;
338
}
339
 
340
/* Return the label after INSN, or put a new label there.  */
341
 
342
rtx
343
get_label_after (rtx insn)
344
{
345
  rtx label;
346
 
347
  /* Find an existing label at this point
348
     or make a new one if there is none.  */
349
  label = next_nonnote_insn (insn);
350
 
351
  if (label == 0 || !LABEL_P (label))
352
    {
353
      label = gen_label_rtx ();
354
      emit_label_after (label, insn);
355
      LABEL_NUSES (label) = 0;
356
    }
357
  return label;
358
}
359
 
360
/* Given a comparison (CODE ARG0 ARG1), inside an insn, INSN, return a code
   of reversed comparison if it is possible to do so.  Otherwise return UNKNOWN.
   UNKNOWN may be returned in case we are having CC_MODE compare and we don't
   know whether its source is floating point or integer comparison.  Machine
   description should define REVERSIBLE_CC_MODE and REVERSE_CONDITION macros
   to help this function avoid overhead in these cases.  */
enum rtx_code
reversed_comparison_code_parts (enum rtx_code code, rtx arg0, rtx arg1, rtx insn)
{
  enum machine_mode mode;

  /* If this is not actually a comparison, we can't reverse it.  */
  if (GET_RTX_CLASS (code) != RTX_COMPARE
      && GET_RTX_CLASS (code) != RTX_COMM_COMPARE)
    return UNKNOWN;

  mode = GET_MODE (arg0);
  if (mode == VOIDmode)
    mode = GET_MODE (arg1);

  /* First see if machine description supplies us way to reverse the
     comparison.  Give it priority over everything else to allow
     machine description to do tricks.  */
  if (GET_MODE_CLASS (mode) == MODE_CC
      && REVERSIBLE_CC_MODE (mode))
    {
#ifdef REVERSE_CONDITION
      return REVERSE_CONDITION (code, mode);
#endif
      /* Fallback when the target defines REVERSIBLE_CC_MODE but not
         REVERSE_CONDITION (unreachable otherwise).  */
      return reverse_condition (code);
    }

  /* Try a few special cases based on the comparison code.  */
  switch (code)
    {
    case GEU:
    case GTU:
    case LEU:
    case LTU:
    case NE:
    case EQ:
      /* It is always safe to reverse EQ and NE, even for the floating
         point.  Similarly the unsigned comparisons are never used for
         floating point so we can reverse them in the default way.  */
      return reverse_condition (code);
    case ORDERED:
    case UNORDERED:
    case LTGT:
    case UNEQ:
      /* In case we already see unordered comparison, we can be sure to
         be dealing with floating point so we don't need any more tests.  */
      return reverse_condition_maybe_unordered (code);
    case UNLT:
    case UNLE:
    case UNGT:
    case UNGE:
      /* We don't have safe way to reverse these yet.  */
      return UNKNOWN;
    default:
      break;
    }

  if (GET_MODE_CLASS (mode) == MODE_CC || CC0_P (arg0))
    {
      rtx prev;
      /* Try to search for the comparison to determine the real mode.
         This code is expensive, but with sane machine description it
         will be never used, since REVERSIBLE_CC_MODE will return true
         in all cases.  */
      if (! insn)
        return UNKNOWN;

      /* Scan backwards (stopping at a label) for the insn that set
         ARG0, to discover the mode the comparison was done in.  */
      for (prev = prev_nonnote_insn (insn);
           prev != 0 && !LABEL_P (prev);
           prev = prev_nonnote_insn (prev))
        {
          rtx set = set_of (arg0, prev);
          if (set && GET_CODE (set) == SET
              && rtx_equal_p (SET_DEST (set), arg0))
            {
              rtx src = SET_SRC (set);

              if (GET_CODE (src) == COMPARE)
                {
                  rtx comparison = src;
                  arg0 = XEXP (src, 0);
                  mode = GET_MODE (arg0);
                  if (mode == VOIDmode)
                    mode = GET_MODE (XEXP (comparison, 1));
                  break;
                }
              /* We can get past reg-reg moves.  This may be useful for model
                 of i387 comparisons that first move flag registers around.  */
              if (REG_P (src))
                {
                  arg0 = src;
                  continue;
                }
            }
          /* If register is clobbered in some ununderstandable way,
             give up.  */
          if (set)
            return UNKNOWN;
        }
    }

  /* Test for an integer condition, or a floating-point comparison
     in which NaNs can be ignored.  */
  if (GET_CODE (arg0) == CONST_INT
      || (GET_MODE (arg0) != VOIDmode
          && GET_MODE_CLASS (mode) != MODE_CC
          && !HONOR_NANS (mode)))
    return reverse_condition (code);

  return UNKNOWN;
}
476
 
477
/* A wrapper around the previous function to take COMPARISON as rtx
478
   expression.  This simplifies many callers.  */
479
enum rtx_code
480
reversed_comparison_code (rtx comparison, rtx insn)
481
{
482
  if (!COMPARISON_P (comparison))
483
    return UNKNOWN;
484
  return reversed_comparison_code_parts (GET_CODE (comparison),
485
                                         XEXP (comparison, 0),
486
                                         XEXP (comparison, 1), insn);
487
}
488
 
489
/* Return comparison with reversed code of EXP.
490
   Return NULL_RTX in case we fail to do the reversal.  */
491
rtx
492
reversed_comparison (rtx exp, enum machine_mode mode)
493
{
494
  enum rtx_code reversed_code = reversed_comparison_code (exp, NULL_RTX);
495
  if (reversed_code == UNKNOWN)
496
    return NULL_RTX;
497
  else
498
    return simplify_gen_relational (reversed_code, mode, VOIDmode,
499
                                    XEXP (exp, 0), XEXP (exp, 1));
500
}
501
 
502
 
503
/* Given an rtx-code for a comparison, return the code for the negated
504
   comparison.  If no such code exists, return UNKNOWN.
505
 
506
   WATCH OUT!  reverse_condition is not safe to use on a jump that might
507
   be acting on the results of an IEEE floating point comparison, because
508
   of the special treatment of non-signaling nans in comparisons.
509
   Use reversed_comparison_code instead.  */
510
 
511
enum rtx_code
512
reverse_condition (enum rtx_code code)
513
{
514
  switch (code)
515
    {
516
    case EQ:
517
      return NE;
518
    case NE:
519
      return EQ;
520
    case GT:
521
      return LE;
522
    case GE:
523
      return LT;
524
    case LT:
525
      return GE;
526
    case LE:
527
      return GT;
528
    case GTU:
529
      return LEU;
530
    case GEU:
531
      return LTU;
532
    case LTU:
533
      return GEU;
534
    case LEU:
535
      return GTU;
536
    case UNORDERED:
537
      return ORDERED;
538
    case ORDERED:
539
      return UNORDERED;
540
 
541
    case UNLT:
542
    case UNLE:
543
    case UNGT:
544
    case UNGE:
545
    case UNEQ:
546
    case LTGT:
547
      return UNKNOWN;
548
 
549
    default:
550
      gcc_unreachable ();
551
    }
552
}
553
 
554
/* Similar, but we're allowed to generate unordered comparisons, which
555
   makes it safe for IEEE floating-point.  Of course, we have to recognize
556
   that the target will support them too...  */
557
 
558
enum rtx_code
559
reverse_condition_maybe_unordered (enum rtx_code code)
560
{
561
  switch (code)
562
    {
563
    case EQ:
564
      return NE;
565
    case NE:
566
      return EQ;
567
    case GT:
568
      return UNLE;
569
    case GE:
570
      return UNLT;
571
    case LT:
572
      return UNGE;
573
    case LE:
574
      return UNGT;
575
    case LTGT:
576
      return UNEQ;
577
    case UNORDERED:
578
      return ORDERED;
579
    case ORDERED:
580
      return UNORDERED;
581
    case UNLT:
582
      return GE;
583
    case UNLE:
584
      return GT;
585
    case UNGT:
586
      return LE;
587
    case UNGE:
588
      return LT;
589
    case UNEQ:
590
      return LTGT;
591
 
592
    default:
593
      gcc_unreachable ();
594
    }
595
}
596
 
597
/* Similar, but return the code when two operands of a comparison are swapped.
598
   This IS safe for IEEE floating-point.  */
599
 
600
enum rtx_code
601
swap_condition (enum rtx_code code)
602
{
603
  switch (code)
604
    {
605
    case EQ:
606
    case NE:
607
    case UNORDERED:
608
    case ORDERED:
609
    case UNEQ:
610
    case LTGT:
611
      return code;
612
 
613
    case GT:
614
      return LT;
615
    case GE:
616
      return LE;
617
    case LT:
618
      return GT;
619
    case LE:
620
      return GE;
621
    case GTU:
622
      return LTU;
623
    case GEU:
624
      return LEU;
625
    case LTU:
626
      return GTU;
627
    case LEU:
628
      return GEU;
629
    case UNLT:
630
      return UNGT;
631
    case UNLE:
632
      return UNGE;
633
    case UNGT:
634
      return UNLT;
635
    case UNGE:
636
      return UNLE;
637
 
638
    default:
639
      gcc_unreachable ();
640
    }
641
}
642
 
643
/* Given a comparison CODE, return the corresponding unsigned comparison.
644
   If CODE is an equality comparison or already an unsigned comparison,
645
   CODE is returned.  */
646
 
647
enum rtx_code
648
unsigned_condition (enum rtx_code code)
649
{
650
  switch (code)
651
    {
652
    case EQ:
653
    case NE:
654
    case GTU:
655
    case GEU:
656
    case LTU:
657
    case LEU:
658
      return code;
659
 
660
    case GT:
661
      return GTU;
662
    case GE:
663
      return GEU;
664
    case LT:
665
      return LTU;
666
    case LE:
667
      return LEU;
668
 
669
    default:
670
      gcc_unreachable ();
671
    }
672
}
673
 
674
/* Similarly, return the signed version of a comparison.  */
675
 
676
enum rtx_code
677
signed_condition (enum rtx_code code)
678
{
679
  switch (code)
680
    {
681
    case EQ:
682
    case NE:
683
    case GT:
684
    case GE:
685
    case LT:
686
    case LE:
687
      return code;
688
 
689
    case GTU:
690
      return GT;
691
    case GEU:
692
      return GE;
693
    case LTU:
694
      return LT;
695
    case LEU:
696
      return LE;
697
 
698
    default:
699
      gcc_unreachable ();
700
    }
701
}
702
 
703
/* Return nonzero if CODE1 is more strict than CODE2, i.e., if the
704
   truth of CODE1 implies the truth of CODE2.  */
705
 
706
int
707
comparison_dominates_p (enum rtx_code code1, enum rtx_code code2)
708
{
709
  /* UNKNOWN comparison codes can happen as a result of trying to revert
710
     comparison codes.
711
     They can't match anything, so we have to reject them here.  */
712
  if (code1 == UNKNOWN || code2 == UNKNOWN)
713
    return 0;
714
 
715
  if (code1 == code2)
716
    return 1;
717
 
718
  switch (code1)
719
    {
720
    case UNEQ:
721
      if (code2 == UNLE || code2 == UNGE)
722
        return 1;
723
      break;
724
 
725
    case EQ:
726
      if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
727
          || code2 == ORDERED)
728
        return 1;
729
      break;
730
 
731
    case UNLT:
732
      if (code2 == UNLE || code2 == NE)
733
        return 1;
734
      break;
735
 
736
    case LT:
737
      if (code2 == LE || code2 == NE || code2 == ORDERED || code2 == LTGT)
738
        return 1;
739
      break;
740
 
741
    case UNGT:
742
      if (code2 == UNGE || code2 == NE)
743
        return 1;
744
      break;
745
 
746
    case GT:
747
      if (code2 == GE || code2 == NE || code2 == ORDERED || code2 == LTGT)
748
        return 1;
749
      break;
750
 
751
    case GE:
752
    case LE:
753
      if (code2 == ORDERED)
754
        return 1;
755
      break;
756
 
757
    case LTGT:
758
      if (code2 == NE || code2 == ORDERED)
759
        return 1;
760
      break;
761
 
762
    case LTU:
763
      if (code2 == LEU || code2 == NE)
764
        return 1;
765
      break;
766
 
767
    case GTU:
768
      if (code2 == GEU || code2 == NE)
769
        return 1;
770
      break;
771
 
772
    case UNORDERED:
773
      if (code2 == NE || code2 == UNEQ || code2 == UNLE || code2 == UNLT
774
          || code2 == UNGE || code2 == UNGT)
775
        return 1;
776
      break;
777
 
778
    default:
779
      break;
780
    }
781
 
782
  return 0;
783
}
784
 
785
/* Return 1 if INSN is an unconditional jump and nothing else.  */
786
 
787
int
788
simplejump_p (rtx insn)
789
{
790
  return (JUMP_P (insn)
791
          && GET_CODE (PATTERN (insn)) == SET
792
          && GET_CODE (SET_DEST (PATTERN (insn))) == PC
793
          && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
794
}
795
 
796
/* Return nonzero if INSN is a (possibly) conditional jump
797
   and nothing more.
798
 
799
   Use of this function is deprecated, since we need to support combined
800
   branch and compare insns.  Use any_condjump_p instead whenever possible.  */
801
 
802
int
803
condjump_p (rtx insn)
804
{
805
  rtx x = PATTERN (insn);
806
 
807
  if (GET_CODE (x) != SET
808
      || GET_CODE (SET_DEST (x)) != PC)
809
    return 0;
810
 
811
  x = SET_SRC (x);
812
  if (GET_CODE (x) == LABEL_REF)
813
    return 1;
814
  else
815
    return (GET_CODE (x) == IF_THEN_ELSE
816
            && ((GET_CODE (XEXP (x, 2)) == PC
817
                 && (GET_CODE (XEXP (x, 1)) == LABEL_REF
818
                     || GET_CODE (XEXP (x, 1)) == RETURN))
819
                || (GET_CODE (XEXP (x, 1)) == PC
820
                    && (GET_CODE (XEXP (x, 2)) == LABEL_REF
821
                        || GET_CODE (XEXP (x, 2)) == RETURN))));
822
}
823
 
824
/* Return nonzero if INSN is a (possibly) conditional jump inside a
825
   PARALLEL.
826
 
827
   Use this function is deprecated, since we need to support combined
828
   branch and compare insns.  Use any_condjump_p instead whenever possible.  */
829
 
830
int
831
condjump_in_parallel_p (rtx insn)
832
{
833
  rtx x = PATTERN (insn);
834
 
835
  if (GET_CODE (x) != PARALLEL)
836
    return 0;
837
  else
838
    x = XVECEXP (x, 0, 0);
839
 
840
  if (GET_CODE (x) != SET)
841
    return 0;
842
  if (GET_CODE (SET_DEST (x)) != PC)
843
    return 0;
844
  if (GET_CODE (SET_SRC (x)) == LABEL_REF)
845
    return 1;
846
  if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
847
    return 0;
848
  if (XEXP (SET_SRC (x), 2) == pc_rtx
849
      && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
850
          || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
851
    return 1;
852
  if (XEXP (SET_SRC (x), 1) == pc_rtx
853
      && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
854
          || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
855
    return 1;
856
  return 0;
857
}
858
 
859
/* Return set of PC, otherwise NULL.  */
860
 
861
rtx
862
pc_set (rtx insn)
863
{
864
  rtx pat;
865
  if (!JUMP_P (insn))
866
    return NULL_RTX;
867
  pat = PATTERN (insn);
868
 
869
  /* The set is allowed to appear either as the insn pattern or
870
     the first set in a PARALLEL.  */
871
  if (GET_CODE (pat) == PARALLEL)
872
    pat = XVECEXP (pat, 0, 0);
873
  if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == PC)
874
    return pat;
875
 
876
  return NULL_RTX;
877
}
878
 
879
/* Return true when insn is an unconditional direct jump,
880
   possibly bundled inside a PARALLEL.  */
881
 
882
int
883
any_uncondjump_p (rtx insn)
884
{
885
  rtx x = pc_set (insn);
886
  if (!x)
887
    return 0;
888
  if (GET_CODE (SET_SRC (x)) != LABEL_REF)
889
    return 0;
890
  if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
891
    return 0;
892
  return 1;
893
}
894
 
895
/* Return true when insn is a conditional jump.  This function works for
896
   instructions containing PC sets in PARALLELs.  The instruction may have
897
   various other effects so before removing the jump you must verify
898
   onlyjump_p.
899
 
900
   Note that unlike condjump_p it returns false for unconditional jumps.  */
901
 
902
int
903
any_condjump_p (rtx insn)
904
{
905
  rtx x = pc_set (insn);
906
  enum rtx_code a, b;
907
 
908
  if (!x)
909
    return 0;
910
  if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
911
    return 0;
912
 
913
  a = GET_CODE (XEXP (SET_SRC (x), 1));
914
  b = GET_CODE (XEXP (SET_SRC (x), 2));
915
 
916
  return ((b == PC && (a == LABEL_REF || a == RETURN))
917
          || (a == PC && (b == LABEL_REF || b == RETURN)));
918
}
919
 
920
/* Return the label of a conditional jump.  */
921
 
922
rtx
923
condjump_label (rtx insn)
924
{
925
  rtx x = pc_set (insn);
926
 
927
  if (!x)
928
    return NULL_RTX;
929
  x = SET_SRC (x);
930
  if (GET_CODE (x) == LABEL_REF)
931
    return x;
932
  if (GET_CODE (x) != IF_THEN_ELSE)
933
    return NULL_RTX;
934
  if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
935
    return XEXP (x, 1);
936
  if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
937
    return XEXP (x, 2);
938
  return NULL_RTX;
939
}
940
 
941
/* Return true if INSN is a (possibly conditional) return insn.  */
942
 
943
static int
944
returnjump_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
945
{
946
  rtx x = *loc;
947
 
948
  return x && (GET_CODE (x) == RETURN
949
               || (GET_CODE (x) == SET && SET_IS_RETURN_P (x)));
950
}
951
 
952
int
953
returnjump_p (rtx insn)
954
{
955
  if (!JUMP_P (insn))
956
    return 0;
957
  return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
958
}
959
 
960
/* Return true if INSN is a jump that only transfers control and
961
   nothing more.  */
962
 
963
int
964
onlyjump_p (rtx insn)
965
{
966
  rtx set;
967
 
968
  if (!JUMP_P (insn))
969
    return 0;
970
 
971
  set = single_set (insn);
972
  if (set == NULL)
973
    return 0;
974
  if (GET_CODE (SET_DEST (set)) != PC)
975
    return 0;
976
  if (side_effects_p (SET_SRC (set)))
977
    return 0;
978
 
979
  return 1;
980
}
981
 
982
#ifdef HAVE_cc0
983
 
984
/* Return nonzero if X is an RTX that only sets the condition codes
985
   and has no side effects.  */
986
 
987
int
988
only_sets_cc0_p (rtx x)
989
{
990
  if (! x)
991
    return 0;
992
 
993
  if (INSN_P (x))
994
    x = PATTERN (x);
995
 
996
  return sets_cc0_p (x) == 1 && ! side_effects_p (x);
997
}
998
 
999
/* Return 1 if X is an RTX that does nothing but set the condition codes
1000
   and CLOBBER or USE registers.
1001
   Return -1 if X does explicitly set the condition codes,
1002
   but also does other things.  */
1003
 
1004
int
1005
sets_cc0_p (rtx x)
1006
{
1007
  if (! x)
1008
    return 0;
1009
 
1010
  if (INSN_P (x))
1011
    x = PATTERN (x);
1012
 
1013
  if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
1014
    return 1;
1015
  if (GET_CODE (x) == PARALLEL)
1016
    {
1017
      int i;
1018
      int sets_cc0 = 0;
1019
      int other_things = 0;
1020
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1021
        {
1022
          if (GET_CODE (XVECEXP (x, 0, i)) == SET
1023
              && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
1024
            sets_cc0 = 1;
1025
          else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
1026
            other_things = 1;
1027
        }
1028
      return ! sets_cc0 ? 0 : other_things ? -1 : 1;
1029
    }
1030
  return 0;
1031
}
1032
#endif
1033
 
1034
/* Follow any unconditional jump at LABEL;
1035
   return the ultimate label reached by any such chain of jumps.
1036
   Return null if the chain ultimately leads to a return instruction.
1037
   If LABEL is not followed by a jump, return LABEL.
1038
   If the chain loops or we can't find end, return LABEL,
1039
   since that tells caller to avoid changing the insn.
1040
 
1041
   If RELOAD_COMPLETED is 0, we do not chain across a USE or CLOBBER.  */
1042
 
1043
rtx
follow_jumps (rtx label)
{
  rtx insn;
  rtx next;
  rtx value = label;
  int depth;

  /* Chase at most 10 jumps.  Each iteration requires that the first
     active insn after VALUE be a simple unconditional jump (with a
     known label, and doing nothing but the jump) or a RETURN, and
     that it be followed by a BARRIER, i.e. no fallthrough.  Note the
     assignments to INSN and NEXT embedded in the loop condition.  */
  for (depth = 0;
       (depth < 10
        && (insn = next_active_insn (value)) != 0
        && JUMP_P (insn)
        && ((JUMP_LABEL (insn) != 0 && any_uncondjump_p (insn)
             && onlyjump_p (insn))
            || GET_CODE (PATTERN (insn)) == RETURN)
        && (next = NEXT_INSN (insn))
        && BARRIER_P (next));
       depth++)
    {
      rtx tem;
      if (!reload_completed && flag_test_coverage)
        {
          /* ??? Optional.  Disables some optimizations, but makes
             gcov output more accurate with -O.  */
          for (tem = value; tem != insn; tem = NEXT_INSN (tem))
            if (NOTE_P (tem) && NOTE_LINE_NUMBER (tem) > 0)
              return value;
        }

      /* If we have found a cycle, make the insn jump to itself.  */
      if (JUMP_LABEL (insn) == label)
        return label;

      /* Stop chaining if the next active insn after the jump's target
	 is a dispatch table (ADDR_VEC / ADDR_DIFF_VEC).  */
      tem = next_active_insn (JUMP_LABEL (insn));
      if (tem && (GET_CODE (PATTERN (tem)) == ADDR_VEC
                  || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
        break;

      value = JUMP_LABEL (insn);
    }
  /* Hitting the depth limit may indicate a cycle we failed to detect,
     so return LABEL to tell the caller to leave the insn alone.  */
  if (depth == 10)
    return label;
  return value;
}
1087
 
1088
 
1089
/* Find all CODE_LABELs referred to in X, and increment their use counts.
1090
   If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
1091
   in INSN, then store one of them in JUMP_LABEL (INSN).
1092
   If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
1093
   referenced in INSN, add a REG_LABEL note containing that label to INSN.
1094
   Also, when there are consecutive labels, canonicalize on the last of them.
1095
 
1096
   Note that two labels separated by a loop-beginning note
1097
   must be kept distinct if we have not yet done loop-optimization,
1098
   because the gap between them is where loop-optimize
1099
   will want to move invariant code to.  CROSS_JUMP tells us
1100
   that loop-optimization is done with.  */
1101
 
1102
void
mark_jump_label (rtx x, rtx insn, int in_mem)
{
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  switch (code)
    {
    /* These codes cannot contain a label reference we care about.  */
    case PC:
    case CC0:
    case REG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CLOBBER:
    case CALL:
      return;

    case MEM:
      /* Remember that we are inside a MEM; SYMBOL_REFs are only
	 interesting (as constant-pool entries) in that context.  */
      in_mem = 1;
      break;

    case SYMBOL_REF:
      if (!in_mem)
        return;

      /* If this is a constant-pool reference, see if it is a label.  */
      if (CONSTANT_POOL_ADDRESS_P (x))
        mark_jump_label (get_pool_constant (x), insn, in_mem);
      break;

    case LABEL_REF:
      {
        rtx label = XEXP (x, 0);

        /* Ignore remaining references to unreachable labels that
           have been deleted.  */
        if (NOTE_P (label)
            && NOTE_LINE_NUMBER (label) == NOTE_INSN_DELETED_LABEL)
          break;

        gcc_assert (LABEL_P (label));

        /* Ignore references to labels of containing functions.  */
        if (LABEL_REF_NONLOCAL_P (x))
          break;

        XEXP (x, 0) = label;
	/* Count this use unless INSN is itself already deleted.  */
        if (! insn || ! INSN_DELETED_P (insn))
          ++LABEL_NUSES (label);

        if (insn)
          {
            if (JUMP_P (insn))
              JUMP_LABEL (insn) = label;
            else
              {
                /* Add a REG_LABEL note for LABEL unless there already
                   is one.  All uses of a label, except for labels
                   that are the targets of jumps, must have a
                   REG_LABEL note.  */
                if (! find_reg_note (insn, REG_LABEL, label))
                  REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, label,
                                                        REG_NOTES (insn));
              }
          }
        return;
      }

  /* Do walk the labels in a vector, but not the first operand of an
     ADDR_DIFF_VEC.  Don't set the JUMP_LABEL of a vector.  */
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      if (! INSN_DELETED_P (insn))
        {
	  /* Element 0 of an ADDR_DIFF_VEC is its base; the labels
	     live in element vector 1.  For ADDR_VEC they are in 0.  */
          int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;

          for (i = 0; i < XVECLEN (x, eltnum); i++)
            mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX, in_mem);
        }
      return;

    default:
      break;
    }

  /* Generic case: recurse into every rtx ('e') and rtx-vector ('E')
     operand of X.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        mark_jump_label (XEXP (x, i), insn, in_mem);
      else if (fmt[i] == 'E')
        {
          int j;
          for (j = 0; j < XVECLEN (x, i); j++)
            mark_jump_label (XVECEXP (x, i, j), insn, in_mem);
        }
    }
}
1201
 
1202
/* If all INSN does is set the pc, delete it,
1203
   and delete the insn that set the condition codes for it
1204
   if that's what the previous thing was.  */
1205
 
1206
void
1207
delete_jump (rtx insn)
1208
{
1209
  rtx set = single_set (insn);
1210
 
1211
  if (set && GET_CODE (SET_DEST (set)) == PC)
1212
    delete_computation (insn);
1213
}
1214
 
1215
/* Recursively delete prior insns that compute the value (used only by INSN
1216
   which the caller is deleting) stored in the register mentioned by NOTE
1217
   which is a REG_DEAD note associated with INSN.  */
1218
 
1219
static void
delete_prior_computation (rtx note, rtx insn)
{
  rtx our_prev;
  rtx reg = XEXP (note, 0);

  /* Scan backwards over ordinary insns and calls, looking for the
     insn that set REG.  Stop at anything else (labels, jumps).  */
  for (our_prev = prev_nonnote_insn (insn);
       our_prev && (NONJUMP_INSN_P (our_prev)
                    || CALL_P (our_prev));
       our_prev = prev_nonnote_insn (our_prev))
    {
      rtx pat = PATTERN (our_prev);

      /* If we reach a CALL which is not calling a const function
         or the callee pops the arguments, then give up.  */
      if (CALL_P (our_prev)
          && (! CONST_OR_PURE_CALL_P (our_prev)
              || GET_CODE (pat) != SET || GET_CODE (SET_SRC (pat)) != CALL))
        break;

      /* If we reach a SEQUENCE, it is too complex to try to
         do anything with it, so give up.  We can be run during
         and after reorg, so SEQUENCE rtl can legitimately show
         up here.  */
      if (GET_CODE (pat) == SEQUENCE)
        break;

      if (GET_CODE (pat) == USE
          && NONJUMP_INSN_P (XEXP (pat, 0)))
        /* reorg creates USEs that look like this.  We leave them
           alone because reorg needs them for its own purposes.  */
        break;

      if (reg_set_p (reg, pat))
        {
	  /* A setter with side effects (other than a call) cannot be
	     removed.  */
          if (side_effects_p (pat) && !CALL_P (our_prev))
            break;

          if (GET_CODE (pat) == PARALLEL)
            {
              /* If we find a SET of something else, we can't
                 delete the insn.  */

              int i;

              for (i = 0; i < XVECLEN (pat, 0); i++)
                {
                  rtx part = XVECEXP (pat, 0, i);

                  if (GET_CODE (part) == SET
                      && SET_DEST (part) != reg)
                    break;
                }

              if (i == XVECLEN (pat, 0))
                delete_computation (our_prev);
            }
          else if (GET_CODE (pat) == SET
                   && REG_P (SET_DEST (pat)))
            {
	      /* Compute the hard-register ranges covered by the SET's
		 destination and by REG (pseudos count as one reg).  */
              int dest_regno = REGNO (SET_DEST (pat));
              int dest_endregno
                = (dest_regno
                   + (dest_regno < FIRST_PSEUDO_REGISTER
                      ? hard_regno_nregs[dest_regno]
                                        [GET_MODE (SET_DEST (pat))] : 1));
              int regno = REGNO (reg);
              int endregno
                = (regno
                   + (regno < FIRST_PSEUDO_REGISTER
                      ? hard_regno_nregs[regno][GET_MODE (reg)] : 1));

	      /* If the dead register fully covers the destination,
		 the whole setter is dead.  */
              if (dest_regno >= regno
                  && dest_endregno <= endregno)
                delete_computation (our_prev);

              /* We may have a multi-word hard register and some, but not
                 all, of the words of the register are needed in subsequent
                 insns.  Write REG_UNUSED notes for those parts that were not
                 needed.  */
              else if (dest_regno <= regno
                       && dest_endregno >= endregno)
                {
                  int i;

                  REG_NOTES (our_prev)
                    = gen_rtx_EXPR_LIST (REG_UNUSED, reg,
                                         REG_NOTES (our_prev));

		  /* Once every word of the destination is marked
		     REG_UNUSED, the setter itself can go.  */
                  for (i = dest_regno; i < dest_endregno; i++)
                    if (! find_regno_note (our_prev, REG_UNUSED, i))
                      break;

                  if (i == dest_endregno)
                    delete_computation (our_prev);
                }
            }

          break;
        }

      /* If PAT references the register that dies here, it is an
         additional use.  Hence any prior SET isn't dead.  However, this
         insn becomes the new place for the REG_DEAD note.  */
      if (reg_overlap_mentioned_p (reg, pat))
        {
          XEXP (note, 1) = REG_NOTES (our_prev);
          REG_NOTES (our_prev) = note;
          break;
        }
    }
}
1331
 
1332
/* Delete INSN and recursively delete insns that compute values used only
1333
   by INSN.  This uses the REG_DEAD notes computed during flow analysis.
1334
   If we are running before flow.c, we need do nothing since flow.c will
1335
   delete dead code.  We also can't know if the registers being used are
1336
   dead or not at this point.
1337
 
1338
   Otherwise, look at all our REG_DEAD notes.  If a previous insn does
1339
   nothing other than set a register that dies in this insn, we can delete
1340
   that insn as well.
1341
 
1342
   On machines with CC0, if CC0 is used in this insn, we may be able to
1343
   delete the insn that set it.  */
1344
 
1345
static void
delete_computation (rtx insn)
{
  rtx note, next;

#ifdef HAVE_cc0
  if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
    {
      rtx prev = prev_nonnote_insn (insn);
      /* We assume that at this stage
         CC's are always set explicitly
         and always immediately before the jump that
         will use them.  So if the previous insn
         exists to set the CC's, delete it
         (unless it performs auto-increments, etc.).  */
      if (prev && NONJUMP_INSN_P (prev)
          && sets_cc0_p (PATTERN (prev)))
        {
	  /* sets_cc0_p > 0 means PREV sets cc0 and nothing else.  */
          if (sets_cc0_p (PATTERN (prev)) > 0
              && ! side_effects_p (PATTERN (prev)))
            delete_computation (prev);
          else
            /* Otherwise, show that cc0 won't be used.  */
            REG_NOTES (prev) = gen_rtx_EXPR_LIST (REG_UNUSED,
                                                  cc0_rtx, REG_NOTES (prev));
        }
    }
#endif

  /* For each register that dies in INSN, try to delete the prior
     insn that computed it.  NEXT is captured first because
     delete_prior_computation may move the note to another insn.  */
  for (note = REG_NOTES (insn); note; note = next)
    {
      next = XEXP (note, 1);

      if (REG_NOTE_KIND (note) != REG_DEAD
          /* Verify that the REG_NOTE is legitimate.  */
          || !REG_P (XEXP (note, 0)))
        continue;

      delete_prior_computation (note, insn);
    }

  /* Finally delete INSN itself (and insns reachable only through it).  */
  delete_related_insns (insn);
}
1388
 
1389
/* Delete insn INSN from the chain of insns and update label ref counts
1390
   and delete insns now unreachable.
1391
 
1392
   Returns the first insn after INSN that was not deleted.
1393
 
1394
   Usage of this instruction is deprecated.  Use delete_insn instead and
1395
   subsequent cfg_cleanup pass to delete unreachable code if needed.  */
1396
 
1397
rtx
delete_related_insns (rtx insn)
{
  int was_code_label = (LABEL_P (insn));
  rtx note;
  rtx next = NEXT_INSN (insn), prev = PREV_INSN (insn);

  /* Advance NEXT past any insns already marked deleted.  */
  while (next && INSN_DELETED_P (next))
    next = NEXT_INSN (next);

  /* This insn is already deleted => return first following nondeleted.  */
  if (INSN_DELETED_P (insn))
    return next;

  delete_insn (insn);

  /* If instruction is followed by a barrier,
     delete the barrier too.  */

  if (next != 0 && BARRIER_P (next))
    delete_insn (next);

  /* If deleting a jump, decrement the count of the label,
     and delete the label if it is now unused.  */

  if (JUMP_P (insn) && JUMP_LABEL (insn))
    {
      rtx lab = JUMP_LABEL (insn), lab_next;

      /* delete_insn above already decremented the label's use count;
	 zero here means this jump was its last user.  */
      if (LABEL_NUSES (lab) == 0)
        {
          /* This can delete NEXT or PREV,
             either directly if NEXT is JUMP_LABEL (INSN),
             or indirectly through more levels of jumps.  */
          delete_related_insns (lab);

          /* I feel a little doubtful about this loop,
             but I see no clean and sure alternative way
             to find the first insn after INSN that is not now deleted.
             I hope this works.  */
          while (next && INSN_DELETED_P (next))
            next = NEXT_INSN (next);
          return next;
        }
      else if (tablejump_p (insn, NULL, &lab_next))
        {
          /* If we're deleting the tablejump, delete the dispatch table.
             We may not be able to kill the label immediately preceding
             just yet, as it might be referenced in code leading up to
             the tablejump.  */
          delete_related_insns (lab_next);
        }
    }

  /* Likewise if we're deleting a dispatch table.  */

  if (JUMP_P (insn)
      && (GET_CODE (PATTERN (insn)) == ADDR_VEC
          || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
    {
      rtx pat = PATTERN (insn);
      int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
      int len = XVECLEN (pat, diff_vec_p);

      /* Delete every label of the table that has no remaining uses.  */
      for (i = 0; i < len; i++)
        if (LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
          delete_related_insns (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
      while (next && INSN_DELETED_P (next))
        next = NEXT_INSN (next);
      return next;
    }

  /* Likewise for an ordinary INSN / CALL_INSN with a REG_LABEL note.  */
  if (NONJUMP_INSN_P (insn) || CALL_P (insn))
    for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
      if (REG_NOTE_KIND (note) == REG_LABEL
          /* This could also be a NOTE_INSN_DELETED_LABEL note.  */
          && LABEL_P (XEXP (note, 0)))
        if (LABEL_NUSES (XEXP (note, 0)) == 0)
          delete_related_insns (XEXP (note, 0));

  /* Back up over deleted insns and notes to find a live predecessor.  */
  while (prev && (INSN_DELETED_P (prev) || NOTE_P (prev)))
    prev = PREV_INSN (prev);

  /* If INSN was a label and a dispatch table follows it,
     delete the dispatch table.  The tablejump must have gone already.
     It isn't useful to fall through into a table.  */

  if (was_code_label
      && NEXT_INSN (insn) != 0
      && JUMP_P (NEXT_INSN (insn))
      && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
          || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
    next = delete_related_insns (NEXT_INSN (insn));

  /* If INSN was a label, delete insns following it if now unreachable.  */

  if (was_code_label && prev && BARRIER_P (prev))
    {
      enum rtx_code code;
      while (next)
        {
          code = GET_CODE (next);
	  /* Skip notes, except a function-end note which must stay.  */
          if (code == NOTE
              && NOTE_LINE_NUMBER (next) != NOTE_INSN_FUNCTION_END)
            next = NEXT_INSN (next);
          /* Keep going past other deleted labels to delete what follows.  */
          else if (code == CODE_LABEL && INSN_DELETED_P (next))
            next = NEXT_INSN (next);
          else if (code == BARRIER || INSN_P (next))
            /* Note: if this deletes a jump, it can cause more
               deletion of unreachable code, after a different label.
               As long as the value from this recursive call is correct,
               this invocation functions correctly.  */
            next = delete_related_insns (next);
          else
            break;
        }
    }

  return next;
}
1519
 
1520
/* Delete a range of insns from FROM to TO, inclusive.
1521
   This is for the sake of peephole optimization, so assume
1522
   that whatever these insns do will still be done by a new
1523
   peephole insn that will replace them.  */
1524
 
1525
void
1526
delete_for_peephole (rtx from, rtx to)
1527
{
1528
  rtx insn = from;
1529
 
1530
  while (1)
1531
    {
1532
      rtx next = NEXT_INSN (insn);
1533
      rtx prev = PREV_INSN (insn);
1534
 
1535
      if (!NOTE_P (insn))
1536
        {
1537
          INSN_DELETED_P (insn) = 1;
1538
 
1539
          /* Patch this insn out of the chain.  */
1540
          /* We don't do this all at once, because we
1541
             must preserve all NOTEs.  */
1542
          if (prev)
1543
            NEXT_INSN (prev) = next;
1544
 
1545
          if (next)
1546
            PREV_INSN (next) = prev;
1547
        }
1548
 
1549
      if (insn == to)
1550
        break;
1551
      insn = next;
1552
    }
1553
 
1554
  /* Note that if TO is an unconditional jump
1555
     we *do not* delete the BARRIER that follows,
1556
     since the peephole that replaces this sequence
1557
     is also an unconditional jump in that case.  */
1558
}
1559
 
1560
/* Throughout LOC, redirect OLABEL to NLABEL.  Treat null OLABEL or
1561
   NLABEL as a return.  Accrue modifications into the change group.  */
1562
 
1563
static void
redirect_exp_1 (rtx *loc, rtx olabel, rtx nlabel, rtx insn)
{
  rtx x = *loc;
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  /* A direct reference to OLABEL: queue its replacement by a
     reference to NLABEL, or by RETURN when NLABEL is null.  */
  if (code == LABEL_REF)
    {
      if (XEXP (x, 0) == olabel)
        {
          rtx n;
          if (nlabel)
            n = gen_rtx_LABEL_REF (Pmode, nlabel);
          else
            n = gen_rtx_RETURN (VOIDmode);

          validate_change (insn, loc, n, 1);
          return;
        }
    }
  /* A RETURN matched against a null OLABEL: replace it by a jump to
     NLABEL (or another RETURN).  If we are replacing the whole
     pattern, wrap the result in a SET of the pc.  */
  else if (code == RETURN && olabel == 0)
    {
      if (nlabel)
        x = gen_r_
tx_LABEL_REF (Pmode, nlabel);
      else
        x = gen_rtx_RETURN (VOIDmode);
      if (loc == &PATTERN (insn))
        x = gen_rtx_SET (VOIDmode, pc_rtx, x);
      validate_change (insn, loc, x, 1);
      return;
    }

  /* Turning a jump to OLABEL into a return: replace the whole
     (set (pc) (label_ref OLABEL)) by a bare RETURN.  */
  if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
      && GET_CODE (SET_SRC (x)) == LABEL_REF
      && XEXP (SET_SRC (x), 0) == olabel)
    {
      validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 1);
      return;
    }

  /* Otherwise recurse into every rtx and rtx-vector operand.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        redirect_exp_1 (&XEXP (x, i), olabel, nlabel, insn);
      else if (fmt[i] == 'E')
        {
          int j;
          for (j = 0; j < XVECLEN (x, i); j++)
            redirect_exp_1 (&XVECEXP (x, i, j), olabel, nlabel, insn);
        }
    }
}
1618
 
1619
/* Make JUMP go to NLABEL instead of where it jumps now.  Accrue
1620
   the modifications into the change group.  Return false if we did
1621
   not see how to do that.  */
1622
 
1623
int
1624
redirect_jump_1 (rtx jump, rtx nlabel)
1625
{
1626
  int ochanges = num_validated_changes ();
1627
  rtx *loc;
1628
 
1629
  if (GET_CODE (PATTERN (jump)) == PARALLEL)
1630
    loc = &XVECEXP (PATTERN (jump), 0, 0);
1631
  else
1632
    loc = &PATTERN (jump);
1633
 
1634
  redirect_exp_1 (loc, JUMP_LABEL (jump), nlabel, jump);
1635
  return num_validated_changes () > ochanges;
1636
}
1637
 
1638
/* Make JUMP go to NLABEL instead of where it jumps now.  If the old
1639
   jump target label is unused as a result, it and the code following
1640
   it may be deleted.
1641
 
1642
   If NLABEL is zero, we are to turn the jump into a (possibly conditional)
1643
   RETURN insn.
1644
 
1645
   The return value will be 1 if the change was made, 0 if it wasn't
1646
   (this can only occur for NLABEL == 0).  */
1647
 
1648
int
1649
redirect_jump (rtx jump, rtx nlabel, int delete_unused)
1650
{
1651
  rtx olabel = JUMP_LABEL (jump);
1652
 
1653
  if (nlabel == olabel)
1654
    return 1;
1655
 
1656
  if (! redirect_jump_1 (jump, nlabel) || ! apply_change_group ())
1657
    return 0;
1658
 
1659
  redirect_jump_2 (jump, olabel, nlabel, delete_unused, 0);
1660
  return 1;
1661
}
1662
 
1663
/* Fix up JUMP_LABEL and label ref counts after OLABEL has been replaced with
1664
   NLABEL in JUMP.  If DELETE_UNUSED is non-negative, copy a
1665
   NOTE_INSN_FUNCTION_END found after OLABEL to the place after NLABEL.
1666
   If DELETE_UNUSED is positive, delete related insn to OLABEL if its ref
1667
   count has dropped to zero.  */
1668
void
redirect_jump_2 (rtx jump, rtx olabel, rtx nlabel, int delete_unused,
                 int invert)
{
  rtx note;

  /* Point the jump at its new target and count the new use.  */
  JUMP_LABEL (jump) = nlabel;
  if (nlabel)
    ++LABEL_NUSES (nlabel);

  /* Update labels in any REG_EQUAL note.  */
  if ((note = find_reg_note (jump, REG_EQUAL, NULL_RTX)) != NULL_RTX)
    {
      /* Drop the note entirely if we are turning the jump into a
	 return, or if inverting the note's condition fails.  */
      if (!nlabel || (invert && !invert_exp_1 (XEXP (note, 0), jump)))
        remove_note (jump, note);
      else
        {
          redirect_exp_1 (&XEXP (note, 0), olabel, nlabel, jump);
          confirm_change_group ();
        }
    }

  /* If we're eliding the jump over exception cleanups at the end of a
     function, move the function end note so that -Wreturn-type works.  */
  if (olabel && nlabel
      && NEXT_INSN (olabel)
      && NOTE_P (NEXT_INSN (olabel))
      && NOTE_LINE_NUMBER (NEXT_INSN (olabel)) == NOTE_INSN_FUNCTION_END
      && delete_unused >= 0)
    emit_note_after (NOTE_INSN_FUNCTION_END, nlabel);

  /* Release the use of the old label; with DELETE_UNUSED > 0 also
     delete it (and code reachable only from it) if it is now unused.  */
  if (olabel && --LABEL_NUSES (olabel) == 0 && delete_unused > 0
      /* Undefined labels will remain outside the insn stream.  */
      && INSN_UID (olabel))
    delete_related_insns (olabel);
  if (invert)
    invert_br_probabilities (jump);
}
1706
 
1707
/* Invert the jump condition X contained in jump insn INSN.  Accrue the
1708
   modifications into the change group.  Return nonzero for success.  */
1709
static int
1710
invert_exp_1 (rtx x, rtx insn)
1711
{
1712
  RTX_CODE code = GET_CODE (x);
1713
 
1714
  if (code == IF_THEN_ELSE)
1715
    {
1716
      rtx comp = XEXP (x, 0);
1717
      rtx tem;
1718
      enum rtx_code reversed_code;
1719
 
1720
      /* We can do this in two ways:  The preferable way, which can only
1721
         be done if this is not an integer comparison, is to reverse
1722
         the comparison code.  Otherwise, swap the THEN-part and ELSE-part
1723
         of the IF_THEN_ELSE.  If we can't do either, fail.  */
1724
 
1725
      reversed_code = reversed_comparison_code (comp, insn);
1726
 
1727
      if (reversed_code != UNKNOWN)
1728
        {
1729
          validate_change (insn, &XEXP (x, 0),
1730
                           gen_rtx_fmt_ee (reversed_code,
1731
                                           GET_MODE (comp), XEXP (comp, 0),
1732
                                           XEXP (comp, 1)),
1733
                           1);
1734
          return 1;
1735
        }
1736
 
1737
      tem = XEXP (x, 1);
1738
      validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
1739
      validate_change (insn, &XEXP (x, 2), tem, 1);
1740
      return 1;
1741
    }
1742
  else
1743
    return 0;
1744
}
1745
 
1746
/* Invert the condition of the jump JUMP, and make it jump to label
1747
   NLABEL instead of where it jumps now.  Accrue changes into the
1748
   change group.  Return false if we didn't see how to perform the
1749
   inversion and redirection.  */
1750
 
1751
int
1752
invert_jump_1 (rtx jump, rtx nlabel)
1753
{
1754
  rtx x = pc_set (jump);
1755
  int ochanges;
1756
  int ok;
1757
 
1758
  ochanges = num_validated_changes ();
1759
  gcc_assert (x);
1760
  ok = invert_exp_1 (SET_SRC (x), jump);
1761
  gcc_assert (ok);
1762
 
1763
  if (num_validated_changes () == ochanges)
1764
    return 0;
1765
 
1766
  /* redirect_jump_1 will fail of nlabel == olabel, and the current use is
1767
     in Pmode, so checking this is not merely an optimization.  */
1768
  return nlabel == JUMP_LABEL (jump) || redirect_jump_1 (jump, nlabel);
1769
}
1770
 
1771
/* Invert the condition of the jump JUMP, and make it jump to label
1772
   NLABEL instead of where it jumps now.  Return true if successful.  */
1773
 
1774
int
1775
invert_jump (rtx jump, rtx nlabel, int delete_unused)
1776
{
1777
  rtx olabel = JUMP_LABEL (jump);
1778
 
1779
  if (invert_jump_1 (jump, nlabel) && apply_change_group ())
1780
    {
1781
      redirect_jump_2 (jump, olabel, nlabel, delete_unused, 1);
1782
      return 1;
1783
    }
1784
  cancel_changes (0);
1785
  return 0;
1786
}
1787
 
1788
 
1789
/* Like rtx_equal_p except that it considers two REGs as equal
1790
   if they renumber to the same value and considers two commutative
1791
   operations to be the same if the order of the operands has been
1792
   reversed.  */
1793
 
1794
int
rtx_renumbered_equal_p (rtx x, rtx y)
{
  int i;
  enum rtx_code code = GET_CODE (x);
  const char *fmt;

  if (x == y)
    return 1;

  /* Registers (possibly wrapped in SUBREGs) are compared by the hard
     register numbers they renumber to.  */
  if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
      && (REG_P (y) || (GET_CODE (y) == SUBREG
                                  && REG_P (SUBREG_REG (y)))))
    {
      int reg_x = -1, reg_y = -1;
      int byte_x = 0, byte_y = 0;

      if (GET_MODE (x) != GET_MODE (y))
        return 0;

      /* If we haven't done any renumbering, don't
         make any assumptions.  */
      if (reg_renumber == 0)
        return rtx_equal_p (x, y);

      /* For a renumbered SUBREG, fold the subreg byte offset into the
	 hard register number; otherwise keep the byte for comparison.  */
      if (code == SUBREG)
        {
          reg_x = REGNO (SUBREG_REG (x));
          byte_x = SUBREG_BYTE (x);

          if (reg_renumber[reg_x] >= 0)
            {
              reg_x = subreg_regno_offset (reg_renumber[reg_x],
                                           GET_MODE (SUBREG_REG (x)),
                                           byte_x,
                                           GET_MODE (x));
              byte_x = 0;
            }
        }
      else
        {
          reg_x = REGNO (x);
          if (reg_renumber[reg_x] >= 0)
            reg_x = reg_renumber[reg_x];
        }

      if (GET_CODE (y) == SUBREG)
        {
          reg_y = REGNO (SUBREG_REG (y));
          byte_y = SUBREG_BYTE (y);

          if (reg_renumber[reg_y] >= 0)
            {
              reg_y = subreg_regno_offset (reg_renumber[reg_y],
                                           GET_MODE (SUBREG_REG (y)),
                                           byte_y,
                                           GET_MODE (y));
              byte_y = 0;
            }
        }
      else
        {
          reg_y = REGNO (y);
          if (reg_renumber[reg_y] >= 0)
            reg_y = reg_renumber[reg_y];
        }

      return reg_x >= 0 && reg_x == reg_y && byte_x == byte_y;
    }

  /* Now we have disposed of all the cases
     in which different rtx codes can match.  */
  if (code != GET_CODE (y))
    return 0;

  switch (code)
    {
    case PC:
    case CC0:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case CONST_INT:
    case CONST_DOUBLE:
      /* Pointer equality was already checked above; these codes are
	 shared/unique, so distinct rtxes cannot be equal.  */
      return 0;

    case LABEL_REF:
      /* We can't assume nonlocal labels have their following insns yet.  */
      if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
        return XEXP (x, 0) == XEXP (y, 0);

      /* Two label-refs are equivalent if they point at labels
         in the same position in the instruction stream.  */
      return (next_real_insn (XEXP (x, 0))
              == next_real_insn (XEXP (y, 0)));

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    case CODE_LABEL:
      /* If we didn't match EQ equality above, they aren't the same.  */
      return 0;

    default:
      break;
    }

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* For commutative operations, the RTX match if the operand match in any
     order.  Also handle the simple binary and unary cases without a loop.  */
  if (targetm.commutative_p (x, UNKNOWN))
    return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
             && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
            || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
                && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
  else if (NON_COMMUTATIVE_P (x))
    return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
            && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
  else if (UNARY_P (x))
    return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole things.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int j;
      switch (fmt[i])
        {
        case 'w':
          if (XWINT (x, i) != XWINT (y, i))
            return 0;
          break;

        case 'i':
          if (XINT (x, i) != XINT (y, i))
            return 0;
          break;

        case 't':
          if (XTREE (x, i) != XTREE (y, i))
            return 0;
          break;

        case 's':
          if (strcmp (XSTR (x, i), XSTR (y, i)))
            return 0;
          break;

        case 'e':
          if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
            return 0;
          break;

        case 'u':
          if (XEXP (x, i) != XEXP (y, i))
            return 0;
          /* Fall through.  */
        case '0':
          break;

        case 'E':
          if (XVECLEN (x, i) != XVECLEN (y, i))
            return 0;
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
              return 0;
          break;

        default:
          gcc_unreachable ();
        }
    }
  return 1;
}
1973
 
1974
/* If X is a hard register or equivalent to one or a subregister of one,
1975
   return the hard register number.  If X is a pseudo register that was not
1976
   assigned a hard register, return the pseudo register number.  Otherwise,
1977
   return -1.  Any rtx is valid for X.  */
1978
 
1979
int
1980
true_regnum (rtx x)
1981
{
1982
  if (REG_P (x))
1983
    {
1984
      if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
1985
        return reg_renumber[REGNO (x)];
1986
      return REGNO (x);
1987
    }
1988
  if (GET_CODE (x) == SUBREG)
1989
    {
1990
      int base = true_regnum (SUBREG_REG (x));
1991
      if (base >= 0
1992
          && base < FIRST_PSEUDO_REGISTER
1993
          && subreg_offset_representable_p (REGNO (SUBREG_REG (x)),
1994
                                            GET_MODE (SUBREG_REG (x)),
1995
                                            SUBREG_BYTE (x), GET_MODE (x)))
1996
        return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
1997
                                           GET_MODE (SUBREG_REG (x)),
1998
                                           SUBREG_BYTE (x), GET_MODE (x));
1999
    }
2000
  return -1;
2001
}
2002
 
2003
/* Return regno of the register REG and handle subregs too.  */
2004
unsigned int
2005
reg_or_subregno (rtx reg)
2006
{
2007
  if (GET_CODE (reg) == SUBREG)
2008
    reg = SUBREG_REG (reg);
2009
  gcc_assert (REG_P (reg));
2010
  return REGNO (reg);
2011
}

powered by: WebSVN 2.1.0

© copyright 1999-2024 OpenCores.org, equivalent to Oliscience, all rights reserved. OpenCores®, registered trademark.