/* Definitions for computing resource usage of specific insns.
   Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008,
   2009, 2010 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "function.h"
#include "regs.h"
#include "flags.h"
#include "output.h"
#include "resource.h"
#include "except.h"
#include "insn-attr.h"
#include "params.h"
#include "df.h"

/* This structure is used to record liveness information at the targets or
   fallthrough insns of branches.  We will most likely need the information
   at targets again, so save it in a hash table rather than recomputing it
   each time.  */

struct target_info
{
  int uid;                      /* INSN_UID of target.  */
  struct target_info *next;     /* Next info for same hash bucket.  */
  HARD_REG_SET live_regs;       /* Registers live at target.  */
  int block;                    /* Basic block number containing target.  */
  int bb_tick;                  /* Generation count of basic block info.  */
};

#define TARGET_HASH_PRIME 257
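/* Targets are hashed by INSN_UID modulo this prime; collisions are chained
   through the NEXT field of struct target_info, as the lookups in
   mark_target_live_regs and clear_hashed_info_for_insn below show.  */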
 
/* Indicates what resources are required at the beginning of the epilogue.  */
static struct resources start_of_epilogue_needs;

/* Indicates what resources are required at function end.  */
static struct resources end_of_function_needs;

/* Define the hash table itself.  */
static struct target_info **target_hash_table = NULL;

/* For each basic block, we maintain a generation number of its basic
   block info, which is updated each time we move an insn from the
   target of a jump.  This is the generation number indexed by block
   number.  */

static int *bb_ticks;

/* Marks registers possibly live at the current place being scanned by
   mark_target_live_regs.  Also used by update_live_status.  */

static HARD_REG_SET current_live_regs;

/* Marks registers for which we have seen a REG_DEAD note but no assignment.
   Also only used by the next two functions.  */

static HARD_REG_SET pending_dead_regs;

static void update_live_status (rtx, const_rtx, void *);
static int find_basic_block (rtx, int);
static rtx next_insn_no_annul (rtx);
static rtx find_dead_or_set_registers (rtx, struct resources*,
                                       rtx*, int, struct resources,
                                       struct resources);

/* Utility function called from mark_target_live_regs via note_stores.
   It deadens any CLOBBERed registers and livens any SET registers.  */

static void
update_live_status (rtx dest, const_rtx x, void *data ATTRIBUTE_UNUSED)
{
  int first_regno, last_regno;
  int i;

  if (!REG_P (dest)
      && (GET_CODE (dest) != SUBREG || !REG_P (SUBREG_REG (dest))))
    return;

  if (GET_CODE (dest) == SUBREG)
    {
      first_regno = subreg_regno (dest);
      last_regno = first_regno + subreg_nregs (dest);
    }
  else
    {
      first_regno = REGNO (dest);
      last_regno = END_HARD_REGNO (dest);
    }

  if (GET_CODE (x) == CLOBBER)
    for (i = first_regno; i < last_regno; i++)
      CLEAR_HARD_REG_BIT (current_live_regs, i);
  else
    for (i = first_regno; i < last_regno; i++)
      {
        SET_HARD_REG_BIT (current_live_regs, i);
        CLEAR_HARD_REG_BIT (pending_dead_regs, i);
      }
}
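
/* mark_target_live_regs below hands the function above to note_stores as
   note_stores (PATTERN (real_insn), update_live_status, NULL), so DEST is
   each place stored to and X the SET or CLOBBER expression containing it.  */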
 
/* Find the number of the basic block with correct live register
   information that starts closest to INSN.  Return -1 if we couldn't
   find such a basic block or the beginning is more than
   SEARCH_LIMIT instructions before INSN.  Use SEARCH_LIMIT = -1 for
   an unlimited search.

   The delay slot filling code destroys the control-flow graph so,
   instead of finding the basic block containing INSN, we search
   backwards toward a BARRIER where the live register information is
   correct.  */

static int
find_basic_block (rtx insn, int search_limit)
{
  /* Scan backwards to the previous BARRIER.  Then see if we can find a
     label that starts a basic block.  Return the basic block number.  */
  for (insn = prev_nonnote_insn (insn);
       insn && !BARRIER_P (insn) && search_limit != 0;
       insn = prev_nonnote_insn (insn), --search_limit)
    ;

  /* The closest BARRIER is too far away.  */
  if (search_limit == 0)
    return -1;

  /* The start of the function.  */
  else if (insn == 0)
    return ENTRY_BLOCK_PTR->next_bb->index;

  /* See if any of the upcoming CODE_LABELs start a basic block.  If we reach
     anything other than a CODE_LABEL or note, we can't find this code.  */
  for (insn = next_nonnote_insn (insn);
       insn && LABEL_P (insn);
       insn = next_nonnote_insn (insn))
    if (BLOCK_FOR_INSN (insn))
      return BLOCK_FOR_INSN (insn)->index;

  return -1;
}
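
/* Both mark_target_live_regs and incr_ticks_for_insn below call this as
   find_basic_block (insn, MAX_DELAY_SLOT_LIVE_SEARCH), so in practice the
   backward scan for a BARRIER is bounded by that parameter rather than
   unlimited.  */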
 
/* Similar to next_insn, but ignores insns in the delay slots of
   an annulled branch.  */

static rtx
next_insn_no_annul (rtx insn)
{
  if (insn)
    {
      /* If INSN is an annulled branch, skip any insns from the target
         of the branch.  */
      if (JUMP_P (insn)
          && INSN_ANNULLED_BRANCH_P (insn)
          && NEXT_INSN (PREV_INSN (insn)) != insn)
        {
          rtx next = NEXT_INSN (insn);
          enum rtx_code code = GET_CODE (next);

          while ((code == INSN || code == JUMP_INSN || code == CALL_INSN)
                 && INSN_FROM_TARGET_P (next))
            {
              insn = next;
              next = NEXT_INSN (insn);
              code = GET_CODE (next);
            }
        }

      insn = NEXT_INSN (insn);
      if (insn && NONJUMP_INSN_P (insn)
          && GET_CODE (PATTERN (insn)) == SEQUENCE)
        insn = XVECEXP (PATTERN (insn), 0, 0);
    }

  return insn;
}
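
/* A note on the NEXT_INSN (PREV_INSN (insn)) != insn test above: it detects
   a branch at the head of a SEQUENCE, since the first insn inside a
   SEQUENCE is not pointed to by the preceding insn in the main chain; that
   insn's NEXT_INSN is the enclosing SEQUENCE insn instead.  */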
 
/* Given X, some rtl, and RES, a pointer to a `struct resource', mark
   which resources are referenced by the insn.  If INCLUDE_DELAYED_EFFECTS
   is TRUE, resources used by the called routine will be included for
   CALL_INSNs.  */

void
mark_referenced_resources (rtx x, struct resources *res,
                           bool include_delayed_effects)
{
  enum rtx_code code = GET_CODE (x);
  int i, j;
  unsigned int r;
  const char *format_ptr;

  /* Handle leaf items for which we set resource flags.  Also, special-case
     CALL, SET and CLOBBER operators.  */
  switch (code)
    {
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case PC:
    case SYMBOL_REF:
    case LABEL_REF:
      return;

    case SUBREG:
      if (!REG_P (SUBREG_REG (x)))
        mark_referenced_resources (SUBREG_REG (x), res, false);
      else
        {
          unsigned int regno = subreg_regno (x);
          unsigned int last_regno = regno + subreg_nregs (x);

          gcc_assert (last_regno <= FIRST_PSEUDO_REGISTER);
          for (r = regno; r < last_regno; r++)
            SET_HARD_REG_BIT (res->regs, r);
        }
      return;

    case REG:
      gcc_assert (HARD_REGISTER_P (x));
      add_to_hard_reg_set (&res->regs, GET_MODE (x), REGNO (x));
      return;

    case MEM:
      /* If this memory shouldn't change, it really isn't referencing
         memory.  */
      if (MEM_READONLY_P (x))
        res->unch_memory = 1;
      else
        res->memory = 1;
      res->volatil |= MEM_VOLATILE_P (x);

      /* Mark registers used to access memory.  */
      mark_referenced_resources (XEXP (x, 0), res, false);
      return;

    case CC0:
      res->cc = 1;
      return;

    case UNSPEC_VOLATILE:
    case TRAP_IF:
    case ASM_INPUT:
      /* Traditional asm's are always volatile.  */
      res->volatil = 1;
      break;

    case ASM_OPERANDS:
      res->volatil |= MEM_VOLATILE_P (x);

      /* For all ASM_OPERANDS, we must traverse the vector of input operands.
         We cannot just fall through here since then we would be confused
         by the ASM_INPUT rtx inside ASM_OPERANDS, which does not indicate
         a traditional asm, unlike its normal usage.  */

      for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
        mark_referenced_resources (ASM_OPERANDS_INPUT (x, i), res, false);
      return;

    case CALL:
      /* The first operand will be a (MEM (xxx)) but doesn't really reference
         memory.  The second operand may be referenced, though.  */
      mark_referenced_resources (XEXP (XEXP (x, 0), 0), res, false);
      mark_referenced_resources (XEXP (x, 1), res, false);
      return;

    case SET:
      /* Usually, the first operand of SET is set, not referenced.  But
         registers used to access memory are referenced.  SET_DEST is
         also referenced if it is a ZERO_EXTRACT or STRICT_LOW_PART.  */

      mark_referenced_resources (SET_SRC (x), res, false);

      x = SET_DEST (x);
      if (GET_CODE (x) == ZERO_EXTRACT
          || GET_CODE (x) == STRICT_LOW_PART)
        mark_referenced_resources (x, res, false);
      else if (GET_CODE (x) == SUBREG)
        x = SUBREG_REG (x);
      if (MEM_P (x))
        mark_referenced_resources (XEXP (x, 0), res, false);
      return;

    case CLOBBER:
      return;

    case CALL_INSN:
      if (include_delayed_effects)
        {
          /* A CALL references memory, the frame pointer if it exists, the
             stack pointer, any global registers and any registers given in
             USE insns immediately in front of the CALL.

             However, we may have moved some of the parameter loading insns
             into the delay slot of this CALL.  If so, the USE's for them
             don't count and should be skipped.  */
          rtx insn = PREV_INSN (x);
          rtx sequence = 0;
          int seq_size = 0;
          int i;

          /* If we are part of a delay slot sequence, point at the SEQUENCE.  */
          if (NEXT_INSN (insn) != x)
            {
              sequence = PATTERN (NEXT_INSN (insn));
              seq_size = XVECLEN (sequence, 0);
              gcc_assert (GET_CODE (sequence) == SEQUENCE);
            }

          res->memory = 1;
          SET_HARD_REG_BIT (res->regs, STACK_POINTER_REGNUM);
          if (frame_pointer_needed)
            {
              SET_HARD_REG_BIT (res->regs, FRAME_POINTER_REGNUM);
#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
              SET_HARD_REG_BIT (res->regs, HARD_FRAME_POINTER_REGNUM);
#endif
            }

          for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
            if (global_regs[i])
              SET_HARD_REG_BIT (res->regs, i);

          /* Check for a REG_SETJMP.  If it exists, then we must
             assume that this call can need any register.

             This is done to be more conservative about how we handle setjmp.
             We assume that they both use and set all registers.  Using all
             registers ensures that a register will not be considered dead
             just because it crosses a setjmp call.  A register should be
             considered dead only if the setjmp call returns nonzero.  */
          if (find_reg_note (x, REG_SETJMP, NULL))
            SET_HARD_REG_SET (res->regs);

          {
            rtx link;

            for (link = CALL_INSN_FUNCTION_USAGE (x);
                 link;
                 link = XEXP (link, 1))
              if (GET_CODE (XEXP (link, 0)) == USE)
                {
                  for (i = 1; i < seq_size; i++)
                    {
                      rtx slot_pat = PATTERN (XVECEXP (sequence, 0, i));
                      if (GET_CODE (slot_pat) == SET
                          && rtx_equal_p (SET_DEST (slot_pat),
                                          XEXP (XEXP (link, 0), 0)))
                        break;
                    }
                  if (i >= seq_size)
                    mark_referenced_resources (XEXP (XEXP (link, 0), 0),
                                               res, false);
                }
          }
        }

      /* ... fall through to other INSN processing ...  */

    case INSN:
    case JUMP_INSN:

#ifdef INSN_REFERENCES_ARE_DELAYED
      if (! include_delayed_effects
          && INSN_REFERENCES_ARE_DELAYED (x))
        return;
#endif

      /* No special processing, just speed up.  */
      mark_referenced_resources (PATTERN (x), res, include_delayed_effects);
      return;

    default:
      break;
    }

  /* Process each sub-expression and flag what it needs.  */
  format_ptr = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
        mark_referenced_resources (XEXP (x, i), res, include_delayed_effects);
        break;

      case 'E':
        for (j = 0; j < XVECLEN (x, i); j++)
          mark_referenced_resources (XVECEXP (x, i, j), res,
                                     include_delayed_effects);
        break;
      }
}
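
/* As an example, for (set (reg A) (plus (reg B) (reg C))) this marks B and
   C but not A; the destination is recorded by mark_set_resources below.
   Callers in this file accumulate an insn's uses with
     CLEAR_RESOURCE (&needed);
     mark_referenced_resources (insn, &needed, true);  */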
 
/* A subroutine of mark_target_live_regs.  Search forward from TARGET
   looking for registers that are set before they are used.  These are dead.
   Stop after passing a few conditional jumps, and/or a small
   number of unconditional branches.  */

static rtx
find_dead_or_set_registers (rtx target, struct resources *res,
                            rtx *jump_target, int jump_count,
                            struct resources set, struct resources needed)
{
  HARD_REG_SET scratch;
  rtx insn, next;
  rtx jump_insn = 0;
  int i;

  for (insn = target; insn; insn = next)
    {
      rtx this_jump_insn = insn;

      next = NEXT_INSN (insn);

      /* If this instruction can throw an exception, then we don't
         know where we might end up next.  That means that we have to
         assume that whatever we have already marked as live really is
         live.  */
      if (can_throw_internal (insn))
        break;

      switch (GET_CODE (insn))
        {
        case CODE_LABEL:
          /* After a label, any pending dead registers that weren't yet
             used can be made dead.  */
          AND_COMPL_HARD_REG_SET (pending_dead_regs, needed.regs);
          AND_COMPL_HARD_REG_SET (res->regs, pending_dead_regs);
          CLEAR_HARD_REG_SET (pending_dead_regs);

          continue;

        case BARRIER:
        case NOTE:
          continue;

        case INSN:
          if (GET_CODE (PATTERN (insn)) == USE)
            {
              /* If INSN is a USE made by update_block, we care about the
                 underlying insn.  Any registers set by the underlying insn
                 are live since the insn is being done somewhere else.  */
              if (INSN_P (XEXP (PATTERN (insn), 0)))
                mark_set_resources (XEXP (PATTERN (insn), 0), res, 0,
                                    MARK_SRC_DEST_CALL);

              /* All other USE insns are to be ignored.  */
              continue;
            }
          else if (GET_CODE (PATTERN (insn)) == CLOBBER)
            continue;
          else if (GET_CODE (PATTERN (insn)) == SEQUENCE)
            {
              /* An unconditional jump can be used to fill the delay slot
                 of a call, so search for a JUMP_INSN in any position.  */
              for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
                {
                  this_jump_insn = XVECEXP (PATTERN (insn), 0, i);
                  if (JUMP_P (this_jump_insn))
                    break;
                }
            }

        default:
          break;
        }

      if (JUMP_P (this_jump_insn))
        {
          if (jump_count++ < 10)
            {
              if (any_uncondjump_p (this_jump_insn)
                  || ANY_RETURN_P (PATTERN (this_jump_insn)))
                {
                  next = JUMP_LABEL (this_jump_insn);
                  if (ANY_RETURN_P (next))
                    next = NULL_RTX;
                  if (jump_insn == 0)
                    {
                      jump_insn = insn;
                      if (jump_target)
                        *jump_target = JUMP_LABEL (this_jump_insn);
                    }
                }
              else if (any_condjump_p (this_jump_insn))
                {
                  struct resources target_set, target_res;
                  struct resources fallthrough_res;

                  /* We can handle conditional branches here by following
                     both paths, and then IORing the results of the two paths
                     together, which will give us registers that are dead
                     on both paths.  Since this is expensive, we give it
                     a much higher cost than unconditional branches.  The
                     cost was chosen so that we will follow at most 1
                     conditional branch.  */

                  jump_count += 4;
                  if (jump_count >= 10)
                    break;

                  mark_referenced_resources (insn, &needed, true);

                  /* For an annulled branch, mark_set_resources ignores slots
                     filled by instructions from the target.  This is correct
                     if the branch is not taken.  Since we are following both
                     paths from the branch, we must also compute correct info
                     if the branch is taken.  We do this by inverting all of
                     the INSN_FROM_TARGET_P bits, calling mark_set_resources,
                     and then inverting the INSN_FROM_TARGET_P bits again.  */

                  if (GET_CODE (PATTERN (insn)) == SEQUENCE
                      && INSN_ANNULLED_BRANCH_P (this_jump_insn))
                    {
                      for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
                        INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i))
                          = ! INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i));

                      target_set = set;
                      mark_set_resources (insn, &target_set, 0,
                                          MARK_SRC_DEST_CALL);

                      for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
                        INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i))
                          = ! INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i));

                      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
                    }
                  else
                    {
                      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
                      target_set = set;
                    }

                  target_res = *res;
                  COPY_HARD_REG_SET (scratch, target_set.regs);
                  AND_COMPL_HARD_REG_SET (scratch, needed.regs);
                  AND_COMPL_HARD_REG_SET (target_res.regs, scratch);

                  fallthrough_res = *res;
                  COPY_HARD_REG_SET (scratch, set.regs);
                  AND_COMPL_HARD_REG_SET (scratch, needed.regs);
                  AND_COMPL_HARD_REG_SET (fallthrough_res.regs, scratch);

                  if (!ANY_RETURN_P (JUMP_LABEL (this_jump_insn)))
                    find_dead_or_set_registers (JUMP_LABEL (this_jump_insn),
                                                &target_res, 0, jump_count,
                                                target_set, needed);
                  find_dead_or_set_registers (next,
                                              &fallthrough_res, 0, jump_count,
                                              set, needed);
                  IOR_HARD_REG_SET (fallthrough_res.regs, target_res.regs);
                  AND_HARD_REG_SET (res->regs, fallthrough_res.regs);
                  break;
                }
              else
                break;
            }
          else
            {
              /* Don't try this optimization if we expired our jump count
                 above, since that would mean there may be an infinite loop
                 in the function being compiled.  */
              jump_insn = 0;
              break;
            }
        }

      mark_referenced_resources (insn, &needed, true);
      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);

      COPY_HARD_REG_SET (scratch, set.regs);
      AND_COMPL_HARD_REG_SET (scratch, needed.regs);
      AND_COMPL_HARD_REG_SET (res->regs, scratch);
    }

  return jump_insn;
}
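
/* The jump budget above works out as follows: an unconditional jump costs 1
   (the jump_count++ < 10 test) and a conditional jump costs 5 in total (the
   increment plus jump_count += 4), so against the limit of 10 the walk
   follows at most one conditional branch, as the comment inside the
   function promises.  */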
 
/* Given X, a part of an insn, and a pointer to a `struct resource',
   RES, indicate which resources are modified by the insn.  If
   MARK_TYPE is MARK_SRC_DEST_CALL, also mark resources potentially
   set by the called routine.

   If IN_DEST is nonzero, it means we are inside a SET.  Otherwise,
   objects are being referenced instead of set.

   We never mark the insn as modifying the condition code unless it explicitly
   SETs CC0 even though this is not totally correct.  The reason for this is
   that we require a SET of CC0 to immediately precede the reference to CC0.
   So if some other insn sets CC0 as a side-effect, we know it cannot affect
   our computation and thus may be placed in a delay slot.  */

void
mark_set_resources (rtx x, struct resources *res, int in_dest,
                    enum mark_resource_type mark_type)
{
  enum rtx_code code;
  int i, j;
  unsigned int r;
  const char *format_ptr;

 restart:

  code = GET_CODE (x);

  switch (code)
    {
    case NOTE:
    case BARRIER:
    case CODE_LABEL:
    case USE:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST:
    case PC:
      /* These don't set any resources.  */
      return;

    case CC0:
      if (in_dest)
        res->cc = 1;
      return;

    case CALL_INSN:
      /* Called routine modifies the condition code, memory, any registers
         that aren't saved across calls, global registers and anything
         explicitly CLOBBERed immediately after the CALL_INSN.  */

      if (mark_type == MARK_SRC_DEST_CALL)
        {
          rtx link;

          res->cc = res->memory = 1;

          IOR_HARD_REG_SET (res->regs, regs_invalidated_by_call);

          for (link = CALL_INSN_FUNCTION_USAGE (x);
               link; link = XEXP (link, 1))
            if (GET_CODE (XEXP (link, 0)) == CLOBBER)
              mark_set_resources (SET_DEST (XEXP (link, 0)), res, 1,
                                  MARK_SRC_DEST);

          /* Check for a REG_SETJMP.  If it exists, then we must
             assume that this call can clobber any register.  */
          if (find_reg_note (x, REG_SETJMP, NULL))
            SET_HARD_REG_SET (res->regs);
        }

      /* ... and also what its RTL says it modifies, if anything.  */

    case JUMP_INSN:
    case INSN:

      /* An insn consisting of just a CLOBBER (or USE) is just for flow
         and doesn't actually do anything, so we ignore it.  */

#ifdef INSN_SETS_ARE_DELAYED
      if (mark_type != MARK_SRC_DEST_CALL
          && INSN_SETS_ARE_DELAYED (x))
        return;
#endif

      x = PATTERN (x);
      if (GET_CODE (x) != USE && GET_CODE (x) != CLOBBER)
        goto restart;
      return;

    case SET:
      /* If the source of a SET is a CALL, this is actually done by
         the called routine.  So only include it if we are to include the
         effects of the calling routine.  */

      mark_set_resources (SET_DEST (x), res,
                          (mark_type == MARK_SRC_DEST_CALL
                           || GET_CODE (SET_SRC (x)) != CALL),
                          mark_type);

      mark_set_resources (SET_SRC (x), res, 0, MARK_SRC_DEST);
      return;

    case CLOBBER:
      mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
      return;

    case SEQUENCE:
      {
        rtx control = XVECEXP (x, 0, 0);
        bool annul_p = JUMP_P (control) && INSN_ANNULLED_BRANCH_P (control);

        mark_set_resources (control, res, 0, mark_type);
        for (i = XVECLEN (x, 0) - 1; i >= 0; --i)
          {
            rtx elt = XVECEXP (x, 0, i);
            if (!annul_p && INSN_FROM_TARGET_P (elt))
              mark_set_resources (elt, res, 0, mark_type);
          }
      }
      return;

    case POST_INC:
    case PRE_INC:
    case POST_DEC:
    case PRE_DEC:
      mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
      return;

    case PRE_MODIFY:
    case POST_MODIFY:
      mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
      mark_set_resources (XEXP (XEXP (x, 1), 0), res, 0, MARK_SRC_DEST);
      mark_set_resources (XEXP (XEXP (x, 1), 1), res, 0, MARK_SRC_DEST);
      return;

    case SIGN_EXTRACT:
    case ZERO_EXTRACT:
      mark_set_resources (XEXP (x, 0), res, in_dest, MARK_SRC_DEST);
      mark_set_resources (XEXP (x, 1), res, 0, MARK_SRC_DEST);
      mark_set_resources (XEXP (x, 2), res, 0, MARK_SRC_DEST);
      return;

    case MEM:
      if (in_dest)
        {
          res->memory = 1;
          res->unch_memory |= MEM_READONLY_P (x);
          res->volatil |= MEM_VOLATILE_P (x);
        }

      mark_set_resources (XEXP (x, 0), res, 0, MARK_SRC_DEST);
      return;

    case SUBREG:
      if (in_dest)
        {
          if (!REG_P (SUBREG_REG (x)))
            mark_set_resources (SUBREG_REG (x), res, in_dest, mark_type);
          else
            {
              unsigned int regno = subreg_regno (x);
              unsigned int last_regno = regno + subreg_nregs (x);

              gcc_assert (last_regno <= FIRST_PSEUDO_REGISTER);
              for (r = regno; r < last_regno; r++)
                SET_HARD_REG_BIT (res->regs, r);
            }
        }
      return;

    case REG:
      if (in_dest)
        {
          gcc_assert (HARD_REGISTER_P (x));
          add_to_hard_reg_set (&res->regs, GET_MODE (x), REGNO (x));
        }
      return;

    case UNSPEC_VOLATILE:
    case ASM_INPUT:
      /* Traditional asm's are always volatile.  */
      res->volatil = 1;
      return;

    case TRAP_IF:
      res->volatil = 1;
      break;

    case ASM_OPERANDS:
      res->volatil |= MEM_VOLATILE_P (x);

      /* For all ASM_OPERANDS, we must traverse the vector of input operands.
         We cannot just fall through here since then we would be confused
         by the ASM_INPUT rtx inside ASM_OPERANDS, which does not indicate
         a traditional asm, unlike its normal usage.  */

      for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
        mark_set_resources (ASM_OPERANDS_INPUT (x, i), res, in_dest,
                            MARK_SRC_DEST);
      return;

    default:
      break;
    }

  /* Process each sub-expression and flag what it needs.  */
  format_ptr = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
        mark_set_resources (XEXP (x, i), res, in_dest, mark_type);
        break;

      case 'E':
        for (j = 0; j < XVECLEN (x, i); j++)
          mark_set_resources (XVECEXP (x, i, j), res, in_dest, mark_type);
        break;
      }
}
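
/* For instance, calling this on a CALL_INSN with MARK_SRC_DEST_CALL records
   the condition code, memory, everything in regs_invalidated_by_call and
   any explicit CLOBBERs in CALL_INSN_FUNCTION_USAGE, whereas plain
   MARK_SRC_DEST only records what the insn's own pattern sets.  */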
 
/* Return TRUE if INSN is a return, possibly with a filled delay slot.  */

static bool
return_insn_p (const_rtx insn)
{
  if (JUMP_P (insn) && ANY_RETURN_P (PATTERN (insn)))
    return true;

  if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    return return_insn_p (XVECEXP (PATTERN (insn), 0, 0));

  return false;
}
 
/* Set the resources that are live at TARGET.

   If TARGET is zero, we refer to the end of the current function and can
   return our precomputed value.

   Otherwise, we try to find out what is live by consulting the basic block
   information.  This is tricky, because we must consider the actions of
   reload and jump optimization, which occur after the basic block information
   has been computed.

   Accordingly, we proceed as follows:

   We find the previous BARRIER and look at all immediately following labels
   (with no intervening active insns) to see if any of them start a basic
   block.  If we hit the start of the function first, we use block 0.

   Once we have found a basic block and a corresponding first insn, we can
   accurately compute the live status (by starting at a label following a
   BARRIER, we are immune to actions taken by reload and jump.)  Then we
   scan all insns between that point and our target.  For each CLOBBER (or
   for call-clobbered regs when we pass a CALL_INSN), mark the appropriate
   registers as dead.  For a SET, mark them as live.

   We have to be careful when using REG_DEAD notes because they are not
   updated by such things as find_equiv_reg.  So keep track of registers
   marked as dead that haven't been assigned to, and mark them dead at the
   next CODE_LABEL since reload and jump won't propagate values across labels.

   If we cannot find the start of a basic block (should be a very rare
   case, if it can happen at all), mark everything as potentially live.

   Next, scan forward from TARGET looking for things set or clobbered
   before they are used.  These are not live.

   Because we can be called many times on the same target, save our results
   in a hash table indexed by INSN_UID.  This is only done if the function
   init_resource_info () was invoked before we are called.  */
 
879
void
880
mark_target_live_regs (rtx insns, rtx target, struct resources *res)
881
{
882
  int b = -1;
883
  unsigned int i;
884
  struct target_info *tinfo = NULL;
885
  rtx insn;
886
  rtx jump_insn = 0;
887
  rtx jump_target;
888
  HARD_REG_SET scratch;
889
  struct resources set, needed;
890
 
891
  /* Handle end of function.  */
892
  if (target == 0 || ANY_RETURN_P (target))
893
    {
894
      *res = end_of_function_needs;
895
      return;
896
    }
897
 
898
  /* Handle return insn.  */
899
  else if (return_insn_p (target))
900
    {
901
      *res = end_of_function_needs;
902
      mark_referenced_resources (target, res, false);
903
      return;
904
    }
905
 
906
  /* We have to assume memory is needed, but the CC isn't.  */
907
  res->memory = 1;
908
  res->volatil = res->unch_memory = 0;
909
  res->cc = 0;
910
 
911
  /* See if we have computed this value already.  */
912
  if (target_hash_table != NULL)
913
    {
914
      for (tinfo = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
915
           tinfo; tinfo = tinfo->next)
916
        if (tinfo->uid == INSN_UID (target))
917
          break;
918
 
919
      /* Start by getting the basic block number.  If we have saved
920
         information, we can get it from there unless the insn at the
921
         start of the basic block has been deleted.  */
922
      if (tinfo && tinfo->block != -1
923
          && ! INSN_DELETED_P (BB_HEAD (BASIC_BLOCK (tinfo->block))))
924
        b = tinfo->block;
925
    }
926
 
927
  if (b == -1)
928
    b = find_basic_block (target, MAX_DELAY_SLOT_LIVE_SEARCH);
929
 
930
  if (target_hash_table != NULL)
931
    {
932
      if (tinfo)
933
        {
934
          /* If the information is up-to-date, use it.  Otherwise, we will
935
             update it below.  */
936
          if (b == tinfo->block && b != -1 && tinfo->bb_tick == bb_ticks[b])
937
            {
938
              COPY_HARD_REG_SET (res->regs, tinfo->live_regs);
939
              return;
940
            }
941
        }
942
      else
943
        {
944
          /* Allocate a place to put our results and chain it into the
945
             hash table.  */
946
          tinfo = XNEW (struct target_info);
947
          tinfo->uid = INSN_UID (target);
948
          tinfo->block = b;
949
          tinfo->next
950
            = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
951
          target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME] = tinfo;
952
        }
953
    }
954
 
955
  CLEAR_HARD_REG_SET (pending_dead_regs);
956
 
957
  /* If we found a basic block, get the live registers from it and update
958
     them with anything set or killed between its start and the insn before
959
     TARGET; this custom life analysis is really about registers so we need
960
     to use the LR problem.  Otherwise, we must assume everything is live.  */
961
  if (b != -1)
962
    {
963
      regset regs_live = DF_LR_IN (BASIC_BLOCK (b));
964
      rtx start_insn, stop_insn;
965
 
966
      /* Compute hard regs live at start of block.  */
967
      REG_SET_TO_HARD_REG_SET (current_live_regs, regs_live);
968
 
969
      /* Get starting and ending insn, handling the case where each might
970
         be a SEQUENCE.  */
971
      start_insn = (b == ENTRY_BLOCK_PTR->next_bb->index ?
972
                    insns : BB_HEAD (BASIC_BLOCK (b)));
973
      stop_insn = target;
974
 
975
      if (NONJUMP_INSN_P (start_insn)
976
          && GET_CODE (PATTERN (start_insn)) == SEQUENCE)
977
        start_insn = XVECEXP (PATTERN (start_insn), 0, 0);
978
 
979
      if (NONJUMP_INSN_P (stop_insn)
980
          && GET_CODE (PATTERN (stop_insn)) == SEQUENCE)
981
        stop_insn = next_insn (PREV_INSN (stop_insn));
982
 
983
      for (insn = start_insn; insn != stop_insn;
984
           insn = next_insn_no_annul (insn))
985
        {
986
          rtx link;
987
          rtx real_insn = insn;
988
          enum rtx_code code = GET_CODE (insn);
989
 
990
          if (DEBUG_INSN_P (insn))
991
            continue;
992
 
993
          /* If this insn is from the target of a branch, it isn't going to
994
             be used in the sequel.  If it is used in both cases, this
995
             test will not be true.  */
996
          if ((code == INSN || code == JUMP_INSN || code == CALL_INSN)
997
              && INSN_FROM_TARGET_P (insn))
998
            continue;
999
 
1000
          /* If this insn is a USE made by update_block, we care about the
1001
             underlying insn.  */
1002
          if (code == INSN && GET_CODE (PATTERN (insn)) == USE
1003
              && INSN_P (XEXP (PATTERN (insn), 0)))
1004
              real_insn = XEXP (PATTERN (insn), 0);
1005
 
1006
          if (CALL_P (real_insn))
1007
            {
1008
              /* CALL clobbers all call-used regs that aren't fixed except
1009
                 sp, ap, and fp.  Do this before setting the result of the
1010
                 call live.  */
1011
              AND_COMPL_HARD_REG_SET (current_live_regs,
1012
                                      regs_invalidated_by_call);
1013
 
1014
              /* A CALL_INSN sets any global register live, since it may
1015
                 have been modified by the call.  */
1016
              for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1017
                if (global_regs[i])
1018
                  SET_HARD_REG_BIT (current_live_regs, i);
1019
            }
1020
 
1021
          /* Mark anything killed in an insn to be deadened at the next
1022
             label.  Ignore USE insns; the only REG_DEAD notes will be for
1023
             parameters.  But they might be early.  A CALL_INSN will usually
1024
             clobber registers used for parameters.  It isn't worth bothering
1025
             with the unlikely case when it won't.  */
1026
          if ((NONJUMP_INSN_P (real_insn)
1027
               && GET_CODE (PATTERN (real_insn)) != USE
1028
               && GET_CODE (PATTERN (real_insn)) != CLOBBER)
1029
              || JUMP_P (real_insn)
1030
              || CALL_P (real_insn))
1031
            {
1032
              for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
1033
                if (REG_NOTE_KIND (link) == REG_DEAD
1034
                    && REG_P (XEXP (link, 0))
1035
                    && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
1036
                  add_to_hard_reg_set (&pending_dead_regs,
1037
                                      GET_MODE (XEXP (link, 0)),
1038
                                      REGNO (XEXP (link, 0)));
1039
 
1040
              note_stores (PATTERN (real_insn), update_live_status, NULL);
1041
 
1042
              /* If any registers were unused after this insn, kill them.
1043
                 These notes will always be accurate.  */
1044
              for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
1045
                if (REG_NOTE_KIND (link) == REG_UNUSED
1046
                    && REG_P (XEXP (link, 0))
1047
                    && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
1048
                  remove_from_hard_reg_set (&current_live_regs,
1049
                                           GET_MODE (XEXP (link, 0)),
1050
                                           REGNO (XEXP (link, 0)));
1051
            }
1052
 
1053
          else if (LABEL_P (real_insn))
1054
            {
1055
              basic_block bb;
1056
 
1057
              /* A label clobbers the pending dead registers since neither
1058
                 reload nor jump will propagate a value across a label.  */
1059
              AND_COMPL_HARD_REG_SET (current_live_regs, pending_dead_regs);
1060
              CLEAR_HARD_REG_SET (pending_dead_regs);
1061
 
1062
              /* We must conservatively assume that all registers that used
1063
                 to be live here still are.  The fallthrough edge may have
1064
                 left a live register uninitialized.  */
1065
              bb = BLOCK_FOR_INSN (real_insn);
1066
              if (bb)
1067
                {
1068
                  HARD_REG_SET extra_live;
1069
 
1070
                  REG_SET_TO_HARD_REG_SET (extra_live, DF_LR_IN (bb));
1071
                  IOR_HARD_REG_SET (current_live_regs, extra_live);
1072
                }
1073
            }
1074
 
1075
          /* The beginning of the epilogue corresponds to the end of the
1076
             RTL chain when there are no epilogue insns.  Certain resources
1077
             are implicitly required at that point.  */
1078
          else if (NOTE_P (real_insn)
1079
                   && NOTE_KIND (real_insn) == NOTE_INSN_EPILOGUE_BEG)
1080
            IOR_HARD_REG_SET (current_live_regs, start_of_epilogue_needs.regs);
1081
        }
1082
 
1083
      COPY_HARD_REG_SET (res->regs, current_live_regs);
1084
      if (tinfo != NULL)
1085
        {
1086
          tinfo->block = b;
1087
          tinfo->bb_tick = bb_ticks[b];
1088
        }
1089
    }
1090
  else
1091
    /* We didn't find the start of a basic block.  Assume everything
1092
       in use.  This should happen only extremely rarely.  */
1093
    SET_HARD_REG_SET (res->regs);
1094
 
1095
  CLEAR_RESOURCE (&set);
1096
  CLEAR_RESOURCE (&needed);
1097
 
1098
  jump_insn = find_dead_or_set_registers (target, res, &jump_target, 0,
1099
                                          set, needed);
1100
 
1101
  /* If we hit an unconditional branch, we have another way of finding out
1102
     what is live: we can see what is live at the branch target and include
1103
     anything used but not set before the branch.  We add the live
1104
     resources found using the test below to those found until now.  */
1105
 
1106
  if (jump_insn)
1107
    {
1108
      struct resources new_resources;
1109
      rtx stop_insn = next_active_insn (jump_insn);
1110
 
1111
      if (!ANY_RETURN_P (jump_target))
1112
        jump_target = next_active_insn (jump_target);
1113
      mark_target_live_regs (insns, jump_target, &new_resources);
1114
      CLEAR_RESOURCE (&set);
1115
      CLEAR_RESOURCE (&needed);
1116
 
1117
      /* Include JUMP_INSN in the needed registers.  */
1118
      for (insn = target; insn != stop_insn; insn = next_active_insn (insn))
1119
        {
1120
          mark_referenced_resources (insn, &needed, true);
1121
 
1122
          COPY_HARD_REG_SET (scratch, needed.regs);
1123
          AND_COMPL_HARD_REG_SET (scratch, set.regs);
1124
          IOR_HARD_REG_SET (new_resources.regs, scratch);
1125
 
1126
          mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
1127
        }
1128
 
1129
      IOR_HARD_REG_SET (res->regs, new_resources.regs);
1130
    }
1131
 
1132
  if (tinfo != NULL)
1133
    {
1134
      COPY_HARD_REG_SET (tinfo->live_regs, res->regs);
1135
    }
1136
}
1137
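
/* A sketch of typical use, modeled on this file's main client, the delay
   branch scheduler in reorg.c (names illustrative):

     struct resources needed;
     mark_target_live_regs (get_insns (), target_label, &needed);

   after which needed.regs approximates the hard registers live at
   TARGET_LABEL.  */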
 
/* Initialize the resources required by mark_target_live_regs ().
   This should be invoked before the first call to mark_target_live_regs.  */

void
init_resource_info (rtx epilogue_insn)
{
  int i;
  basic_block bb;

  /* Indicate what resources are required to be valid at the end of the current
     function.  The condition code never is and memory always is.
     The stack pointer is needed unless EXIT_IGNORE_STACK is true
     and there is an epilogue that restores the original stack pointer
     from the frame pointer.  Registers used to return the function value
     are needed.  Registers holding global variables are needed.  */

  end_of_function_needs.cc = 0;
  end_of_function_needs.memory = 1;
  end_of_function_needs.unch_memory = 0;
  CLEAR_HARD_REG_SET (end_of_function_needs.regs);

  if (frame_pointer_needed)
    {
      SET_HARD_REG_BIT (end_of_function_needs.regs, FRAME_POINTER_REGNUM);
#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
      SET_HARD_REG_BIT (end_of_function_needs.regs, HARD_FRAME_POINTER_REGNUM);
#endif
    }
  if (!(frame_pointer_needed
        && EXIT_IGNORE_STACK
        && epilogue_insn
        && !current_function_sp_is_unchanging))
    SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);

  if (crtl->return_rtx != 0)
    mark_referenced_resources (crtl->return_rtx,
                               &end_of_function_needs, true);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (global_regs[i]
#ifdef EPILOGUE_USES
        || EPILOGUE_USES (i)
#endif
        )
      SET_HARD_REG_BIT (end_of_function_needs.regs, i);

  /* The registers required to be live at the end of the function are
     represented in the flow information as being dead just prior to
     reaching the end of the function.  For example, the return of a value
     might be represented by a USE of the return register immediately
     followed by an unconditional jump to the return label where the
     return label is the end of the RTL chain.  The end of the RTL chain
     is then taken to mean that the return register is live.

     This sequence is no longer maintained when epilogue instructions are
     added to the RTL chain.  To reconstruct the original meaning, the
     start of the epilogue (NOTE_INSN_EPILOGUE_BEG) is regarded as the
     point where these registers become live (start_of_epilogue_needs).
     If epilogue instructions are present, the registers set by those
     instructions won't have been processed by flow.  Thus, those
     registers are additionally required at the end of the RTL chain
     (end_of_function_needs).  */

  start_of_epilogue_needs = end_of_function_needs;

  while ((epilogue_insn = next_nonnote_insn (epilogue_insn)))
    {
      mark_set_resources (epilogue_insn, &end_of_function_needs, 0,
                          MARK_SRC_DEST_CALL);
      if (return_insn_p (epilogue_insn))
        break;
    }

  /* Allocate and initialize the tables used by mark_target_live_regs.  */
  target_hash_table = XCNEWVEC (struct target_info *, TARGET_HASH_PRIME);
  bb_ticks = XCNEWVEC (int, last_basic_block);

  /* Set the BLOCK_FOR_INSN of each label that starts a basic block.  */
  FOR_EACH_BB (bb)
    if (LABEL_P (BB_HEAD (bb)))
      BLOCK_FOR_INSN (BB_HEAD (bb)) = bb;
}
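
/* Putting the entry points together, the expected call sequence is:

     init_resource_info (epilogue_insn);
     ...mark_target_live_regs () any number of times, with
        incr_ticks_for_insn () / clear_hashed_info_for_insn () used to
        invalidate cached liveness when insns are moved or deleted...
     free_resource_info ();  */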
 
/* Free up the resources allocated to mark_target_live_regs ().  This
   should be invoked after the last call to mark_target_live_regs ().  */

void
free_resource_info (void)
{
  basic_block bb;

  if (target_hash_table != NULL)
    {
      int i;

      for (i = 0; i < TARGET_HASH_PRIME; ++i)
        {
          struct target_info *ti = target_hash_table[i];

          while (ti)
            {
              struct target_info *next = ti->next;
              free (ti);
              ti = next;
            }
        }

      free (target_hash_table);
      target_hash_table = NULL;
    }

  if (bb_ticks != NULL)
    {
      free (bb_ticks);
      bb_ticks = NULL;
    }

  FOR_EACH_BB (bb)
    if (LABEL_P (BB_HEAD (bb)))
      BLOCK_FOR_INSN (BB_HEAD (bb)) = NULL;
}
 
/* Clear any hashed information that we have stored for INSN.  */

void
clear_hashed_info_for_insn (rtx insn)
{
  struct target_info *tinfo;

  if (target_hash_table != NULL)
    {
      for (tinfo = target_hash_table[INSN_UID (insn) % TARGET_HASH_PRIME];
           tinfo; tinfo = tinfo->next)
        if (tinfo->uid == INSN_UID (insn))
          break;

      if (tinfo)
        tinfo->block = -1;
    }
}
 
/* Increment the tick count for the basic block that contains INSN.  */

void
incr_ticks_for_insn (rtx insn)
{
  int b = find_basic_block (insn, MAX_DELAY_SLOT_LIVE_SEARCH);

  if (b != -1)
    bb_ticks[b]++;
}
 
/* Add TRIAL to the set of resources used at the end of the current
   function.  */

void
mark_end_of_function_resources (rtx trial, bool include_delayed_effects)
{
  mark_referenced_resources (trial, &end_of_function_needs,
                             include_delayed_effects);
}