/* Definitions for computing resource usage of specific insns.
   Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "function.h"
#include "regs.h"
#include "flags.h"
#include "output.h"
#include "resource.h"
#include "except.h"
#include "insn-attr.h"
#include "params.h"

/* This structure is used to record liveness information at the targets or
   fallthrough insns of branches.  We will most likely need the information
   at targets again, so save them in a hash table rather than recomputing them
   each time.  */

struct target_info
{
  int uid;                      /* INSN_UID of target.  */
  struct target_info *next;     /* Next info for same hash bucket.  */
  HARD_REG_SET live_regs;       /* Registers live at target.  */
  int block;                    /* Basic block number containing target.  */
  int bb_tick;                  /* Generation count of basic block info.  */
};

#define TARGET_HASH_PRIME 257

/* Indicates what resources are required at the beginning of the epilogue.  */
static struct resources start_of_epilogue_needs;

/* Indicates what resources are required at function end.  */
static struct resources end_of_function_needs;

/* Define the hash table itself.  */
static struct target_info **target_hash_table = NULL;
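
/* A lookup in this table (as done by mark_target_live_regs and
   clear_hashed_info_for_insn below) is a chained hash walk keyed on
   INSN_UID modulo TARGET_HASH_PRIME:

       struct target_info *tinfo;

       for (tinfo = target_hash_table[INSN_UID (insn) % TARGET_HASH_PRIME];
            tinfo; tinfo = tinfo->next)
         if (tinfo->uid == INSN_UID (insn))
           break;
*/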

/* For each basic block, we maintain a generation number of its basic
   block info, which is updated each time we move an insn from the
   target of a jump.  This is the generation number indexed by block
   number.  */

static int *bb_ticks;

/* Marks registers possibly live at the current place being scanned by
   mark_target_live_regs.  Also used by update_live_status.  */

static HARD_REG_SET current_live_regs;

/* Marks registers for which we have seen a REG_DEAD note but no assignment.
   Also only used by the next two functions.  */

static HARD_REG_SET pending_dead_regs;

static void update_live_status (rtx, rtx, void *);
static int find_basic_block (rtx, int);
static rtx next_insn_no_annul (rtx);
static rtx find_dead_or_set_registers (rtx, struct resources*,
                                       rtx*, int, struct resources,
                                       struct resources);

/* Utility function called from mark_target_live_regs via note_stores.
   It deadens any CLOBBERed registers and livens any SET registers.  */

static void
update_live_status (rtx dest, rtx x, void *data ATTRIBUTE_UNUSED)
{
  int first_regno, last_regno;
  int i;

  if (!REG_P (dest)
      && (GET_CODE (dest) != SUBREG || !REG_P (SUBREG_REG (dest))))
    return;

  if (GET_CODE (dest) == SUBREG)
    first_regno = subreg_regno (dest);
  else
    first_regno = REGNO (dest);

  last_regno = first_regno + hard_regno_nregs[first_regno][GET_MODE (dest)];

  if (GET_CODE (x) == CLOBBER)
    for (i = first_regno; i < last_regno; i++)
      CLEAR_HARD_REG_BIT (current_live_regs, i);
  else
    for (i = first_regno; i < last_regno; i++)
      {
        SET_HARD_REG_BIT (current_live_regs, i);
        CLEAR_HARD_REG_BIT (pending_dead_regs, i);
      }
}
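
/* update_live_status is only invoked through note_stores, which walks
   the SETs and CLOBBERs of an insn pattern and calls this function on
   each destination; the single call site, in mark_target_live_regs
   below, is:

       note_stores (PATTERN (real_insn), update_live_status, NULL);
*/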

/* Find the number of the basic block with correct live register
   information that starts closest to INSN.  Return -1 if we couldn't
   find such a basic block or the beginning is more than
   SEARCH_LIMIT instructions before INSN.  Use SEARCH_LIMIT = -1 for
   an unlimited search.

   The delay slot filling code destroys the control-flow graph so,
   instead of finding the basic block containing INSN, we search
   backwards toward a BARRIER where the live register information is
   correct.  */

static int
find_basic_block (rtx insn, int search_limit)
{
  basic_block bb;

  /* Scan backwards to the previous BARRIER.  Then see if we can find a
     label that starts a basic block.  Return the basic block number.  */
  for (insn = prev_nonnote_insn (insn);
       insn && !BARRIER_P (insn) && search_limit != 0;
       insn = prev_nonnote_insn (insn), --search_limit)
    ;

  /* The closest BARRIER is too far away.  */
  if (search_limit == 0)
    return -1;

  /* The start of the function.  */
  else if (insn == 0)
    return ENTRY_BLOCK_PTR->next_bb->index;

  /* See if any of the upcoming CODE_LABELs start a basic block.  If we reach
     anything other than a CODE_LABEL or note, we can't find this code.  */
  for (insn = next_nonnote_insn (insn);
       insn && LABEL_P (insn);
       insn = next_nonnote_insn (insn))
    {
      FOR_EACH_BB (bb)
        if (insn == BB_HEAD (bb))
          return bb->index;
    }

  return -1;
}
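
/* A typical call bounds the backward scan and checks for failure, as
   incr_ticks_for_insn at the end of this file does:

       int b = find_basic_block (insn, MAX_DELAY_SLOT_LIVE_SEARCH);

       if (b != -1)
         bb_ticks[b]++;
*/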

/* Similar to next_insn, but ignores insns in the delay slots of
   an annulled branch.  */

static rtx
next_insn_no_annul (rtx insn)
{
  if (insn)
    {
      /* If INSN is an annulled branch, skip any insns from the target
         of the branch.  */
      if (INSN_P (insn)
          && INSN_ANNULLED_BRANCH_P (insn)
          && NEXT_INSN (PREV_INSN (insn)) != insn)
        {
          rtx next = NEXT_INSN (insn);
          enum rtx_code code = GET_CODE (next);

          while ((code == INSN || code == JUMP_INSN || code == CALL_INSN)
                 && INSN_FROM_TARGET_P (next))
            {
              insn = next;
              next = NEXT_INSN (insn);
              code = GET_CODE (next);
            }
        }

      insn = NEXT_INSN (insn);
      if (insn && NONJUMP_INSN_P (insn)
          && GET_CODE (PATTERN (insn)) == SEQUENCE)
        insn = XVECEXP (PATTERN (insn), 0, 0);
    }

  return insn;
}
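
/* This is the step function used when scanning a range of insns whose
   liveness is being reconstructed, as in mark_target_live_regs below:

       for (insn = start_insn; insn != stop_insn;
            insn = next_insn_no_annul (insn))
         ...
*/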

/* Given X, some rtl, and RES, a pointer to a `struct resources', mark
   which resources are referenced by the insn.  If INCLUDE_DELAYED_EFFECTS
   is TRUE, resources used by the called routine will be included for
   CALL_INSNs.  */

void
mark_referenced_resources (rtx x, struct resources *res,
                           int include_delayed_effects)
{
  enum rtx_code code = GET_CODE (x);
  int i, j;
  unsigned int r;
  const char *format_ptr;

  /* Handle leaf items for which we set resource flags.  Also, special-case
     CALL, SET and CLOBBER operators.  */
  switch (code)
    {
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case PC:
    case SYMBOL_REF:
    case LABEL_REF:
      return;

    case SUBREG:
      if (!REG_P (SUBREG_REG (x)))
        mark_referenced_resources (SUBREG_REG (x), res, 0);
      else
        {
          unsigned int regno = subreg_regno (x);
          unsigned int last_regno
            = regno + hard_regno_nregs[regno][GET_MODE (x)];

          gcc_assert (last_regno <= FIRST_PSEUDO_REGISTER);
          for (r = regno; r < last_regno; r++)
            SET_HARD_REG_BIT (res->regs, r);
        }
      return;

    case REG:
        {
          unsigned int regno = REGNO (x);
          unsigned int last_regno
            = regno + hard_regno_nregs[regno][GET_MODE (x)];

          gcc_assert (last_regno <= FIRST_PSEUDO_REGISTER);
          for (r = regno; r < last_regno; r++)
            SET_HARD_REG_BIT (res->regs, r);
        }
      return;

    case MEM:
      /* If this memory shouldn't change, it really isn't referencing
         memory.  */
      if (MEM_READONLY_P (x))
        res->unch_memory = 1;
      else
        res->memory = 1;
      res->volatil |= MEM_VOLATILE_P (x);

      /* Mark registers used to access memory.  */
      mark_referenced_resources (XEXP (x, 0), res, 0);
      return;

    case CC0:
      res->cc = 1;
      return;

    case UNSPEC_VOLATILE:
    case ASM_INPUT:
      /* Traditional asms are always volatile.  */
      res->volatil = 1;
      return;

    case TRAP_IF:
      res->volatil = 1;
      break;

    case ASM_OPERANDS:
      res->volatil |= MEM_VOLATILE_P (x);

      /* For all ASM_OPERANDS, we must traverse the vector of input operands.
         We cannot just fall through here since then we would be confused
         by the ASM_INPUT rtx inside ASM_OPERANDS, which does not indicate
         a traditional asm, unlike its normal usage.  */

      for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
        mark_referenced_resources (ASM_OPERANDS_INPUT (x, i), res, 0);
      return;

    case CALL:
      /* The first operand will be a (MEM (xxx)) but doesn't really reference
         memory.  The second operand may be referenced, though.  */
      mark_referenced_resources (XEXP (XEXP (x, 0), 0), res, 0);
      mark_referenced_resources (XEXP (x, 1), res, 0);
      return;

    case SET:
      /* Usually, the first operand of SET is set, not referenced.  But
         registers used to access memory are referenced.  SET_DEST is
         also referenced if it is a ZERO_EXTRACT.  */

      mark_referenced_resources (SET_SRC (x), res, 0);

      x = SET_DEST (x);
      if (GET_CODE (x) == ZERO_EXTRACT
          || GET_CODE (x) == STRICT_LOW_PART)
        mark_referenced_resources (x, res, 0);
      else if (GET_CODE (x) == SUBREG)
        x = SUBREG_REG (x);
      if (MEM_P (x))
        mark_referenced_resources (XEXP (x, 0), res, 0);
      return;

    case CLOBBER:
      return;

    case CALL_INSN:
      if (include_delayed_effects)
        {
          /* A CALL references memory, the frame pointer if it exists, the
             stack pointer, any global registers and any registers given in
             USE insns immediately in front of the CALL.

             However, we may have moved some of the parameter loading insns
             into the delay slot of this CALL.  If so, the USE's for them
             don't count and should be skipped.  */
          rtx insn = PREV_INSN (x);
          rtx sequence = 0;
          int seq_size = 0;
          int i;

          /* If we are part of a delay slot sequence, point at the SEQUENCE.  */
          if (NEXT_INSN (insn) != x)
            {
              sequence = PATTERN (NEXT_INSN (insn));
              seq_size = XVECLEN (sequence, 0);
              gcc_assert (GET_CODE (sequence) == SEQUENCE);
            }

          res->memory = 1;
          SET_HARD_REG_BIT (res->regs, STACK_POINTER_REGNUM);
          if (frame_pointer_needed)
            {
              SET_HARD_REG_BIT (res->regs, FRAME_POINTER_REGNUM);
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
              SET_HARD_REG_BIT (res->regs, HARD_FRAME_POINTER_REGNUM);
#endif
            }

          for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
            if (global_regs[i])
              SET_HARD_REG_BIT (res->regs, i);

          /* Check for a REG_SETJMP.  If it exists, then we must
             assume that this call can need any register.

             This is done to be more conservative about how we handle setjmp.
             We assume that they both use and set all registers.  Using all
             registers ensures that a register will not be considered dead
             just because it crosses a setjmp call.  A register should be
             considered dead only if the setjmp call returns nonzero.  */
          if (find_reg_note (x, REG_SETJMP, NULL))
            SET_HARD_REG_SET (res->regs);

          {
            rtx link;

            for (link = CALL_INSN_FUNCTION_USAGE (x);
                 link;
                 link = XEXP (link, 1))
              if (GET_CODE (XEXP (link, 0)) == USE)
                {
                  for (i = 1; i < seq_size; i++)
                    {
                      rtx slot_pat = PATTERN (XVECEXP (sequence, 0, i));
                      if (GET_CODE (slot_pat) == SET
                          && rtx_equal_p (SET_DEST (slot_pat),
                                          XEXP (XEXP (link, 0), 0)))
                        break;
                    }
                  if (i >= seq_size)
                    mark_referenced_resources (XEXP (XEXP (link, 0), 0),
                                               res, 0);
                }
          }
        }

      /* ... fall through to other INSN processing ...  */

    case INSN:
    case JUMP_INSN:

#ifdef INSN_REFERENCES_ARE_DELAYED
      if (! include_delayed_effects
          && INSN_REFERENCES_ARE_DELAYED (x))
        return;
#endif

      /* No special processing, just speed up.  */
      mark_referenced_resources (PATTERN (x), res, include_delayed_effects);
      return;

    default:
      break;
    }

  /* Process each sub-expression and flag what it needs.  */
  format_ptr = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
        mark_referenced_resources (XEXP (x, i), res, include_delayed_effects);
        break;

      case 'E':
        for (j = 0; j < XVECLEN (x, i); j++)
          mark_referenced_resources (XVECEXP (x, i, j), res,
                                     include_delayed_effects);
        break;
      }
}
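
/* Callers normally pair this function with mark_set_resources and then
   mask one result against the other; the idiom used throughout this
   file to remove registers that are set before they are needed is:

       mark_referenced_resources (insn, &needed, 1);
       mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);

       COPY_HARD_REG_SET (scratch, set.regs);
       AND_COMPL_HARD_REG_SET (scratch, needed.regs);
       AND_COMPL_HARD_REG_SET (res->regs, scratch);

   i.e. any register written by INSN without first being read is removed
   from the live set RES.  */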

/* A subroutine of mark_target_live_regs.  Search forward from TARGET
   looking for registers that are set before they are used.  These are dead.
   Stop after passing a few conditional jumps, and/or a small
   number of unconditional branches.  */

static rtx
find_dead_or_set_registers (rtx target, struct resources *res,
                            rtx *jump_target, int jump_count,
                            struct resources set, struct resources needed)
{
  HARD_REG_SET scratch;
  rtx insn, next;
  rtx jump_insn = 0;
  int i;

  for (insn = target; insn; insn = next)
    {
      rtx this_jump_insn = insn;

      next = NEXT_INSN (insn);

      /* If this instruction can throw an exception, then we don't
         know where we might end up next.  That means that we have to
         assume that whatever we have already marked as live really is
         live.  */
      if (can_throw_internal (insn))
        break;

      switch (GET_CODE (insn))
        {
        case CODE_LABEL:
          /* After a label, any pending dead registers that weren't yet
             used can be made dead.  */
          AND_COMPL_HARD_REG_SET (pending_dead_regs, needed.regs);
          AND_COMPL_HARD_REG_SET (res->regs, pending_dead_regs);
          CLEAR_HARD_REG_SET (pending_dead_regs);

          continue;

        case BARRIER:
        case NOTE:
          continue;

        case INSN:
          if (GET_CODE (PATTERN (insn)) == USE)
            {
              /* If INSN is a USE made by update_block, we care about the
                 underlying insn.  Any registers set by the underlying insn
                 are live since the insn is being done somewhere else.  */
              if (INSN_P (XEXP (PATTERN (insn), 0)))
                mark_set_resources (XEXP (PATTERN (insn), 0), res, 0,
                                    MARK_SRC_DEST_CALL);

              /* All other USE insns are to be ignored.  */
              continue;
            }
          else if (GET_CODE (PATTERN (insn)) == CLOBBER)
            continue;
          else if (GET_CODE (PATTERN (insn)) == SEQUENCE)
            {
              /* An unconditional jump can be used to fill the delay slot
                 of a call, so search for a JUMP_INSN in any position.  */
              for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
                {
                  this_jump_insn = XVECEXP (PATTERN (insn), 0, i);
                  if (JUMP_P (this_jump_insn))
                    break;
                }
            }

        default:
          break;
        }

      if (JUMP_P (this_jump_insn))
        {
          if (jump_count++ < 10)
            {
              if (any_uncondjump_p (this_jump_insn)
                  || GET_CODE (PATTERN (this_jump_insn)) == RETURN)
                {
                  next = JUMP_LABEL (this_jump_insn);
                  if (jump_insn == 0)
                    {
                      jump_insn = insn;
                      if (jump_target)
                        *jump_target = JUMP_LABEL (this_jump_insn);
                    }
                }
              else if (any_condjump_p (this_jump_insn))
                {
                  struct resources target_set, target_res;
                  struct resources fallthrough_res;

                  /* We can handle conditional branches here by following
                     both paths, and then IOR the results of the two paths
                     together, which will give us registers that are dead
                     on both paths.  Since this is expensive, we give it
                     a much higher cost than unconditional branches.  The
                     cost was chosen so that we will follow at most 1
                     conditional branch.  */

                  jump_count += 4;
                  if (jump_count >= 10)
                    break;

                  mark_referenced_resources (insn, &needed, 1);

                  /* For an annulled branch, mark_set_resources ignores slots
                     filled by instructions from the target.  This is correct
                     if the branch is not taken.  Since we are following both
                     paths from the branch, we must also compute correct info
                     if the branch is taken.  We do this by inverting all of
                     the INSN_FROM_TARGET_P bits, calling mark_set_resources,
                     and then inverting the INSN_FROM_TARGET_P bits again.  */

                  if (GET_CODE (PATTERN (insn)) == SEQUENCE
                      && INSN_ANNULLED_BRANCH_P (this_jump_insn))
                    {
                      for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
                        INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i))
                          = ! INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i));

                      target_set = set;
                      mark_set_resources (insn, &target_set, 0,
                                          MARK_SRC_DEST_CALL);

                      for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
                        INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i))
                          = ! INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i));

                      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
                    }
                  else
                    {
                      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
                      target_set = set;
                    }

                  target_res = *res;
                  COPY_HARD_REG_SET (scratch, target_set.regs);
                  AND_COMPL_HARD_REG_SET (scratch, needed.regs);
                  AND_COMPL_HARD_REG_SET (target_res.regs, scratch);

                  fallthrough_res = *res;
                  COPY_HARD_REG_SET (scratch, set.regs);
                  AND_COMPL_HARD_REG_SET (scratch, needed.regs);
                  AND_COMPL_HARD_REG_SET (fallthrough_res.regs, scratch);

                  find_dead_or_set_registers (JUMP_LABEL (this_jump_insn),
                                              &target_res, 0, jump_count,
                                              target_set, needed);
                  find_dead_or_set_registers (next,
                                              &fallthrough_res, 0, jump_count,
                                              set, needed);
                  IOR_HARD_REG_SET (fallthrough_res.regs, target_res.regs);
                  AND_HARD_REG_SET (res->regs, fallthrough_res.regs);
                  break;
                }
              else
                break;
            }
          else
            {
              /* Don't try this optimization if we expired our jump count
                 above, since that would mean there may be an infinite loop
                 in the function being compiled.  */
              jump_insn = 0;
              break;
            }
        }

      mark_referenced_resources (insn, &needed, 1);
      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);

      COPY_HARD_REG_SET (scratch, set.regs);
      AND_COMPL_HARD_REG_SET (scratch, needed.regs);
      AND_COMPL_HARD_REG_SET (res->regs, scratch);
    }

  return jump_insn;
}
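
/* The jump budget above works out as follows: an unconditional jump
   costs 1 (the jump_count++ < 10 test) while a conditional jump costs
   5 in total (that increment plus jump_count += 4).  With the cutoff
   at 10, starting from jump_count == 0, one conditional branch leaves
   the count at 5 and a second would reach 10 and stop the scan, which
   is how the "follow at most 1 conditional branch" cost mentioned in
   the comment above is enforced.  */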

/* Given X, a part of an insn, and a pointer to a `struct resources',
   RES, indicate which resources are modified by the insn. If
   MARK_TYPE is MARK_SRC_DEST_CALL, also mark resources potentially
   set by the called routine.

   If IN_DEST is nonzero, it means we are inside a SET.  Otherwise,
   objects are being referenced instead of set.

   We never mark the insn as modifying the condition code unless it explicitly
   SETs CC0 even though this is not totally correct.  The reason for this is
   that we require a SET of CC0 to immediately precede the reference to CC0.
   So if some other insn sets CC0 as a side-effect, we know it cannot affect
   our computation and thus may be placed in a delay slot.  */

void
mark_set_resources (rtx x, struct resources *res, int in_dest,
                    enum mark_resource_type mark_type)
{
  enum rtx_code code;
  int i, j;
  unsigned int r;
  const char *format_ptr;

 restart:

  code = GET_CODE (x);

  switch (code)
    {
    case NOTE:
    case BARRIER:
    case CODE_LABEL:
    case USE:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST:
    case PC:
      /* These don't set any resources.  */
      return;

    case CC0:
      if (in_dest)
        res->cc = 1;
      return;

    case CALL_INSN:
      /* Called routine modifies the condition code, memory, any registers
         that aren't saved across calls, global registers and anything
         explicitly CLOBBERed immediately after the CALL_INSN.  */

      if (mark_type == MARK_SRC_DEST_CALL)
        {
          rtx link;

          res->cc = res->memory = 1;
          for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
            if (call_used_regs[r] || global_regs[r])
              SET_HARD_REG_BIT (res->regs, r);

          for (link = CALL_INSN_FUNCTION_USAGE (x);
               link; link = XEXP (link, 1))
            if (GET_CODE (XEXP (link, 0)) == CLOBBER)
              mark_set_resources (SET_DEST (XEXP (link, 0)), res, 1,
                                  MARK_SRC_DEST);

          /* Check for a REG_SETJMP.  If it exists, then we must
             assume that this call can clobber any register.  */
          if (find_reg_note (x, REG_SETJMP, NULL))
            SET_HARD_REG_SET (res->regs);
        }

      /* ... and also what its RTL says it modifies, if anything.  */

    case JUMP_INSN:
    case INSN:

        /* An insn consisting of just a CLOBBER (or USE) is just for flow
           and doesn't actually do anything, so we ignore it.  */

#ifdef INSN_SETS_ARE_DELAYED
      if (mark_type != MARK_SRC_DEST_CALL
          && INSN_SETS_ARE_DELAYED (x))
        return;
#endif

      x = PATTERN (x);
      if (GET_CODE (x) != USE && GET_CODE (x) != CLOBBER)
        goto restart;
      return;

    case SET:
      /* If the source of a SET is a CALL, this is actually done by
         the called routine.  So only include it if we are to include the
         effects of the calling routine.  */

      mark_set_resources (SET_DEST (x), res,
                          (mark_type == MARK_SRC_DEST_CALL
                           || GET_CODE (SET_SRC (x)) != CALL),
                          mark_type);

      mark_set_resources (SET_SRC (x), res, 0, MARK_SRC_DEST);
      return;

    case CLOBBER:
      mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
      return;

    case SEQUENCE:
      for (i = 0; i < XVECLEN (x, 0); i++)
        if (! (INSN_ANNULLED_BRANCH_P (XVECEXP (x, 0, 0))
               && INSN_FROM_TARGET_P (XVECEXP (x, 0, i))))
          mark_set_resources (XVECEXP (x, 0, i), res, 0, mark_type);
      return;

    case POST_INC:
    case PRE_INC:
    case POST_DEC:
    case PRE_DEC:
      mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
      return;

    case PRE_MODIFY:
    case POST_MODIFY:
      mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
      mark_set_resources (XEXP (XEXP (x, 1), 0), res, 0, MARK_SRC_DEST);
      mark_set_resources (XEXP (XEXP (x, 1), 1), res, 0, MARK_SRC_DEST);
      return;

    case SIGN_EXTRACT:
    case ZERO_EXTRACT:
      mark_set_resources (XEXP (x, 0), res, in_dest, MARK_SRC_DEST);
      mark_set_resources (XEXP (x, 1), res, 0, MARK_SRC_DEST);
      mark_set_resources (XEXP (x, 2), res, 0, MARK_SRC_DEST);
      return;

    case MEM:
      if (in_dest)
        {
          res->memory = 1;
          res->unch_memory |= MEM_READONLY_P (x);
          res->volatil |= MEM_VOLATILE_P (x);
        }

      mark_set_resources (XEXP (x, 0), res, 0, MARK_SRC_DEST);
      return;

    case SUBREG:
      if (in_dest)
        {
          if (!REG_P (SUBREG_REG (x)))
            mark_set_resources (SUBREG_REG (x), res, in_dest, mark_type);
          else
            {
              unsigned int regno = subreg_regno (x);
              unsigned int last_regno
                = regno + hard_regno_nregs[regno][GET_MODE (x)];

              gcc_assert (last_regno <= FIRST_PSEUDO_REGISTER);
              for (r = regno; r < last_regno; r++)
                SET_HARD_REG_BIT (res->regs, r);
            }
        }
      return;

    case REG:
      if (in_dest)
        {
          unsigned int regno = REGNO (x);
          unsigned int last_regno
            = regno + hard_regno_nregs[regno][GET_MODE (x)];

          gcc_assert (last_regno <= FIRST_PSEUDO_REGISTER);
          for (r = regno; r < last_regno; r++)
            SET_HARD_REG_BIT (res->regs, r);
        }
      return;

    case UNSPEC_VOLATILE:
    case ASM_INPUT:
      /* Traditional asms are always volatile.  */
      res->volatil = 1;
      return;

    case TRAP_IF:
      res->volatil = 1;
      break;

    case ASM_OPERANDS:
      res->volatil |= MEM_VOLATILE_P (x);

      /* For all ASM_OPERANDS, we must traverse the vector of input operands.
         We cannot just fall through here since then we would be confused
         by the ASM_INPUT rtx inside ASM_OPERANDS, which does not indicate
         a traditional asm, unlike its normal usage.  */

      for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
        mark_set_resources (ASM_OPERANDS_INPUT (x, i), res, in_dest,
                            MARK_SRC_DEST);
      return;

    default:
      break;
    }

  /* Process each sub-expression and flag what it needs.  */
  format_ptr = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
        mark_set_resources (XEXP (x, i), res, in_dest, mark_type);
        break;

      case 'E':
        for (j = 0; j < XVECLEN (x, i); j++)
          mark_set_resources (XVECEXP (x, i, j), res, in_dest, mark_type);
        break;
      }
}
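
/* A small worked example of the two walkers: for an insn whose pattern
   is (set (reg:SI 2) (reg:SI 3)), mark_referenced_resources records
   only reg 3 in NEEDED (just the SET_SRC is referenced), while
   mark_set_resources records reg 2 in SET (the SET_DEST, reached with
   IN_DEST nonzero).  Under the masking idiom shown after
   mark_referenced_resources, reg 2 is set before being used and so is
   removed from the live set, while reg 3 stays live.  (The register
   numbers are illustrative only.)  */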

/* Return TRUE if INSN is a return, possibly with a filled delay slot.  */

static bool
return_insn_p (rtx insn)
{
  if (JUMP_P (insn) && GET_CODE (PATTERN (insn)) == RETURN)
    return true;

  if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    return return_insn_p (XVECEXP (PATTERN (insn), 0, 0));

  return false;
}

/* Set the resources that are live at TARGET.

   If TARGET is zero, we refer to the end of the current function and can
   return our precomputed value.

   Otherwise, we try to find out what is live by consulting the basic block
   information.  This is tricky, because we must consider the actions of
   reload and jump optimization, which occur after the basic block information
   has been computed.

   Accordingly, we proceed as follows:

   We find the previous BARRIER and look at all immediately following labels
   (with no intervening active insns) to see if any of them start a basic
   block.  If we hit the start of the function first, we use block 0.

   Once we have found a basic block and a corresponding first insn, we can
   accurately compute the live status from basic_block_live_regs and
   reg_renumber.  (By starting at a label following a BARRIER, we are immune
   to actions taken by reload and jump.)  Then we scan all insns between
   that point and our target.  For each CLOBBER (or for call-clobbered regs
   when we pass a CALL_INSN), mark the appropriate registers as dead.  For
   a SET, mark them as live.

   We have to be careful when using REG_DEAD notes because they are not
   updated by such things as find_equiv_reg.  So keep track of registers
   marked as dead that haven't been assigned to, and mark them dead at the
   next CODE_LABEL since reload and jump won't propagate values across labels.

   If we cannot find the start of a basic block (should be a very rare
   case, if it can happen at all), mark everything as potentially live.

   Next, scan forward from TARGET looking for things set or clobbered
   before they are used.  These are not live.

   Because we can be called many times on the same target, save our results
   in a hash table indexed by INSN_UID.  This is only done if the function
   init_resource_info () was invoked before we are called.  */

void
mark_target_live_regs (rtx insns, rtx target, struct resources *res)
{
  int b = -1;
  unsigned int i;
  struct target_info *tinfo = NULL;
  rtx insn;
  rtx jump_insn = 0;
  rtx jump_target;
  HARD_REG_SET scratch;
  struct resources set, needed;

  /* Handle end of function.  */
  if (target == 0)
    {
      *res = end_of_function_needs;
      return;
    }

  /* Handle return insn.  */
  else if (return_insn_p (target))
    {
      *res = end_of_function_needs;
      mark_referenced_resources (target, res, 0);
      return;
    }

  /* We have to assume memory is needed, but the CC isn't.  */
  res->memory = 1;
  res->volatil = res->unch_memory = 0;
  res->cc = 0;

  /* See if we have computed this value already.  */
  if (target_hash_table != NULL)
    {
      for (tinfo = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
           tinfo; tinfo = tinfo->next)
        if (tinfo->uid == INSN_UID (target))
          break;

      /* Start by getting the basic block number.  If we have saved
         information, we can get it from there unless the insn at the
         start of the basic block has been deleted.  */
      if (tinfo && tinfo->block != -1
          && ! INSN_DELETED_P (BB_HEAD (BASIC_BLOCK (tinfo->block))))
        b = tinfo->block;
    }

  if (b == -1)
    b = find_basic_block (target, MAX_DELAY_SLOT_LIVE_SEARCH);

  if (target_hash_table != NULL)
    {
      if (tinfo)
        {
          /* If the information is up-to-date, use it.  Otherwise, we will
             update it below.  */
          if (b == tinfo->block && b != -1 && tinfo->bb_tick == bb_ticks[b])
            {
              COPY_HARD_REG_SET (res->regs, tinfo->live_regs);
              return;
            }
        }
      else
        {
          /* Allocate a place to put our results and chain it into the
             hash table.  */
          tinfo = xmalloc (sizeof (struct target_info));
          tinfo->uid = INSN_UID (target);
          tinfo->block = b;
          tinfo->next
            = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
          target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME] = tinfo;
        }
    }

  CLEAR_HARD_REG_SET (pending_dead_regs);

  /* If we found a basic block, get the live registers from it and update
     them with anything set or killed between its start and the insn before
     TARGET.  Otherwise, we must assume everything is live.  */
  if (b != -1)
    {
      regset regs_live = BASIC_BLOCK (b)->il.rtl->global_live_at_start;
      unsigned int j;
      unsigned int regno;
      rtx start_insn, stop_insn;
      reg_set_iterator rsi;

      /* Compute hard regs live at start of block -- this is the real hard regs
         marked live, plus live pseudo regs that have been renumbered to
         hard regs.  */

      REG_SET_TO_HARD_REG_SET (current_live_regs, regs_live);

      EXECUTE_IF_SET_IN_REG_SET (regs_live, FIRST_PSEUDO_REGISTER, i, rsi)
        {
          if (reg_renumber[i] >= 0)
            {
              regno = reg_renumber[i];
              for (j = regno;
                   j < regno + hard_regno_nregs[regno][PSEUDO_REGNO_MODE (i)];
                   j++)
                SET_HARD_REG_BIT (current_live_regs, j);
            }
        }

      /* Get starting and ending insn, handling the case where each might
         be a SEQUENCE.  */
      start_insn = (b == 0 ? insns : BB_HEAD (BASIC_BLOCK (b)));
      stop_insn = target;

      if (NONJUMP_INSN_P (start_insn)
          && GET_CODE (PATTERN (start_insn)) == SEQUENCE)
        start_insn = XVECEXP (PATTERN (start_insn), 0, 0);

      if (NONJUMP_INSN_P (stop_insn)
          && GET_CODE (PATTERN (stop_insn)) == SEQUENCE)
        stop_insn = next_insn (PREV_INSN (stop_insn));

      for (insn = start_insn; insn != stop_insn;
           insn = next_insn_no_annul (insn))
        {
          rtx link;
          rtx real_insn = insn;
          enum rtx_code code = GET_CODE (insn);

          /* If this insn is from the target of a branch, it isn't going to
             be used in the sequel.  If it is used in both cases, this
             test will not be true.  */
          if ((code == INSN || code == JUMP_INSN || code == CALL_INSN)
              && INSN_FROM_TARGET_P (insn))
            continue;

          /* If this insn is a USE made by update_block, we care about the
             underlying insn.  */
          if (code == INSN && GET_CODE (PATTERN (insn)) == USE
              && INSN_P (XEXP (PATTERN (insn), 0)))
              real_insn = XEXP (PATTERN (insn), 0);

          if (CALL_P (real_insn))
            {
              /* CALL clobbers all call-used regs that aren't fixed except
                 sp, ap, and fp.  Do this before setting the result of the
                 call live.  */
              AND_COMPL_HARD_REG_SET (current_live_regs,
                                      regs_invalidated_by_call);

              /* A CALL_INSN sets any global register live, since it may
                 have been modified by the call.  */
              for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
                if (global_regs[i])
                  SET_HARD_REG_BIT (current_live_regs, i);
            }

          /* Mark anything killed in an insn to be deadened at the next
             label.  Ignore USE insns; the only REG_DEAD notes will be for
             parameters.  But they might be early.  A CALL_INSN will usually
             clobber registers used for parameters.  It isn't worth bothering
             with the unlikely case when it won't.  */
          if ((NONJUMP_INSN_P (real_insn)
               && GET_CODE (PATTERN (real_insn)) != USE
               && GET_CODE (PATTERN (real_insn)) != CLOBBER)
              || JUMP_P (real_insn)
              || CALL_P (real_insn))
            {
              for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
                if (REG_NOTE_KIND (link) == REG_DEAD
                    && REG_P (XEXP (link, 0))
                    && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
                  {
                    unsigned int first_regno = REGNO (XEXP (link, 0));
                    unsigned int last_regno
                      = (first_regno
                         + hard_regno_nregs[first_regno]
                                           [GET_MODE (XEXP (link, 0))]);

                    for (i = first_regno; i < last_regno; i++)
                      SET_HARD_REG_BIT (pending_dead_regs, i);
                  }

              note_stores (PATTERN (real_insn), update_live_status, NULL);

              /* If any registers were unused after this insn, kill them.
                 These notes will always be accurate.  */
              for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
                if (REG_NOTE_KIND (link) == REG_UNUSED
                    && REG_P (XEXP (link, 0))
                    && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
                  {
                    unsigned int first_regno = REGNO (XEXP (link, 0));
                    unsigned int last_regno
                      = (first_regno
                         + hard_regno_nregs[first_regno]
                                           [GET_MODE (XEXP (link, 0))]);

                    for (i = first_regno; i < last_regno; i++)
                      CLEAR_HARD_REG_BIT (current_live_regs, i);
                  }
            }

          else if (LABEL_P (real_insn))
            {
              /* A label clobbers the pending dead registers since neither
                 reload nor jump will propagate a value across a label.  */
              AND_COMPL_HARD_REG_SET (current_live_regs, pending_dead_regs);
              CLEAR_HARD_REG_SET (pending_dead_regs);
            }

          /* The beginning of the epilogue corresponds to the end of the
             RTL chain when there are no epilogue insns.  Certain resources
             are implicitly required at that point.  */
          else if (NOTE_P (real_insn)
                   && NOTE_LINE_NUMBER (real_insn) == NOTE_INSN_EPILOGUE_BEG)
            IOR_HARD_REG_SET (current_live_regs, start_of_epilogue_needs.regs);
        }

      COPY_HARD_REG_SET (res->regs, current_live_regs);
      if (tinfo != NULL)
        {
          tinfo->block = b;
          tinfo->bb_tick = bb_ticks[b];
        }
    }
  else
    /* We didn't find the start of a basic block.  Assume everything
       is in use.  This should happen only extremely rarely.  */
    SET_HARD_REG_SET (res->regs);

  CLEAR_RESOURCE (&set);
  CLEAR_RESOURCE (&needed);

  jump_insn = find_dead_or_set_registers (target, res, &jump_target, 0,
                                          set, needed);

  /* If we hit an unconditional branch, we have another way of finding out
     what is live: we can see what is live at the branch target and include
     anything used but not set before the branch.  We add the live
     resources found using the test below to those found until now.  */

  if (jump_insn)
    {
      struct resources new_resources;
      rtx stop_insn = next_active_insn (jump_insn);

      mark_target_live_regs (insns, next_active_insn (jump_target),
                             &new_resources);
      CLEAR_RESOURCE (&set);
      CLEAR_RESOURCE (&needed);

      /* Include JUMP_INSN in the needed registers.  */
      for (insn = target; insn != stop_insn; insn = next_active_insn (insn))
        {
          mark_referenced_resources (insn, &needed, 1);

          COPY_HARD_REG_SET (scratch, needed.regs);
          AND_COMPL_HARD_REG_SET (scratch, set.regs);
          IOR_HARD_REG_SET (new_resources.regs, scratch);

          mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
        }

      IOR_HARD_REG_SET (res->regs, new_resources.regs);
    }

  if (tinfo != NULL)
    {
      COPY_HARD_REG_SET (tinfo->live_regs, res->regs);
    }
}

/* Initialize the resources required by mark_target_live_regs ().
   This should be invoked before the first call to mark_target_live_regs.  */

void
init_resource_info (rtx epilogue_insn)
{
  int i;

  /* Indicate what resources are required to be valid at the end of the current
     function.  The condition code never is and memory always is.  If the
     frame pointer is needed, it is and so is the stack pointer unless
     EXIT_IGNORE_STACK is nonzero.  If the frame pointer is not needed, the
     stack pointer is.  Registers used to return the function value are
     needed.  Registers holding global variables are needed.  */

  end_of_function_needs.cc = 0;
  end_of_function_needs.memory = 1;
  end_of_function_needs.unch_memory = 0;
  CLEAR_HARD_REG_SET (end_of_function_needs.regs);

  if (frame_pointer_needed)
    {
      SET_HARD_REG_BIT (end_of_function_needs.regs, FRAME_POINTER_REGNUM);
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
      SET_HARD_REG_BIT (end_of_function_needs.regs, HARD_FRAME_POINTER_REGNUM);
#endif
      if (! EXIT_IGNORE_STACK
          || current_function_sp_is_unchanging)
        SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);
    }
  else
    SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);

  if (current_function_return_rtx != 0)
    mark_referenced_resources (current_function_return_rtx,
                               &end_of_function_needs, 1);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (global_regs[i]
#ifdef EPILOGUE_USES
        || EPILOGUE_USES (i)
#endif
        )
      SET_HARD_REG_BIT (end_of_function_needs.regs, i);

  /* The registers required to be live at the end of the function are
     represented in the flow information as being dead just prior to
     reaching the end of the function.  For example, the return of a value
     might be represented by a USE of the return register immediately
     followed by an unconditional jump to the return label where the
     return label is the end of the RTL chain.  The end of the RTL chain
     is then taken to mean that the return register is live.

     This sequence is no longer maintained when epilogue instructions are
     added to the RTL chain.  To reconstruct the original meaning, the
     start of the epilogue (NOTE_INSN_EPILOGUE_BEG) is regarded as the
     point where these registers become live (start_of_epilogue_needs).
     If epilogue instructions are present, the registers set by those
     instructions won't have been processed by flow.  Thus, those
     registers are additionally required at the end of the RTL chain
     (end_of_function_needs).  */

  start_of_epilogue_needs = end_of_function_needs;

  while ((epilogue_insn = next_nonnote_insn (epilogue_insn)))
    {
      mark_set_resources (epilogue_insn, &end_of_function_needs, 0,
                          MARK_SRC_DEST_CALL);
      if (return_insn_p (epilogue_insn))
        break;
    }

  /* Allocate and initialize the tables used by mark_target_live_regs.  */
  target_hash_table = xcalloc (TARGET_HASH_PRIME, sizeof (struct target_info *));
  bb_ticks = xcalloc (last_basic_block, sizeof (int));
}
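
/* A sketch of the expected lifecycle, following the ordering that the
   function comments here require (the delay slot scheduler in reorg.c
   is the historical caller; the names below are illustrative):

       struct resources live;

       init_resource_info (epilogue_insn);
       ...
       mark_target_live_regs (get_insns (), target, &live);
       ...
       free_resource_info ();
*/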

/* Free up the resources allocated to mark_target_live_regs ().  This
   should be invoked after the last call to mark_target_live_regs ().  */

void
free_resource_info (void)
{
  if (target_hash_table != NULL)
    {
      int i;

      for (i = 0; i < TARGET_HASH_PRIME; ++i)
        {
          struct target_info *ti = target_hash_table[i];

          while (ti)
            {
              struct target_info *next = ti->next;
              free (ti);
              ti = next;
            }
        }

      free (target_hash_table);
      target_hash_table = NULL;
    }

  if (bb_ticks != NULL)
    {
      free (bb_ticks);
      bb_ticks = NULL;
    }
}

/* Clear any hashed information that we have stored for INSN.  */

void
clear_hashed_info_for_insn (rtx insn)
{
  struct target_info *tinfo;

  if (target_hash_table != NULL)
    {
      for (tinfo = target_hash_table[INSN_UID (insn) % TARGET_HASH_PRIME];
           tinfo; tinfo = tinfo->next)
        if (tinfo->uid == INSN_UID (insn))
          break;

      if (tinfo)
        tinfo->block = -1;
    }
}

/* Increment the tick count for the basic block that contains INSN.  */

void
incr_ticks_for_insn (rtx insn)
{
  int b = find_basic_block (insn, MAX_DELAY_SLOT_LIVE_SEARCH);

  if (b != -1)
    bb_ticks[b]++;
}

/* Add TRIAL to the set of resources used at the end of the current
   function.  */
void
mark_end_of_function_resources (rtx trial, int include_delayed_effects)
{
  mark_referenced_resources (trial, &end_of_function_needs,
                             include_delayed_effects);
}
