/* Definitions for computing resource usage of specific insns.
   Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "function.h"
#include "regs.h"
#include "flags.h"
#include "output.h"
#include "resource.h"
#include "except.h"
#include "insn-attr.h"
#include "params.h"

/* This structure is used to record liveness information at the targets or
   fallthrough insns of branches.  We will most likely need the information
   at targets again, so save it in a hash table rather than recomputing it
   each time.  */

struct target_info
{
  int uid;                      /* INSN_UID of target.  */
  struct target_info *next;     /* Next info for same hash bucket.  */
  HARD_REG_SET live_regs;       /* Registers live at target.  */
  int block;                    /* Basic block number containing target.  */
  int bb_tick;                  /* Generation count of basic block info.  */
};

#define TARGET_HASH_PRIME 257

/* Indicates what resources are required at the beginning of the epilogue.  */
static struct resources start_of_epilogue_needs;

/* Indicates what resources are required at function end.  */
static struct resources end_of_function_needs;

/* Define the hash table itself.  */
static struct target_info **target_hash_table = NULL;

/* For each basic block, we maintain a generation number of its basic
   block info, which is updated each time we move an insn from the
   target of a jump.  This is the generation number indexed by block
   number.  */

static int *bb_ticks;

/* Marks registers possibly live at the current place being scanned by
   mark_target_live_regs.  Also used by update_live_status.  */

static HARD_REG_SET current_live_regs;

/* Marks registers for which we have seen a REG_DEAD note but no assignment.
   Also, only used by the next two functions.  */

static HARD_REG_SET pending_dead_regs;

static void update_live_status (rtx, rtx, void *);
static int find_basic_block (rtx, int);
static rtx next_insn_no_annul (rtx);
static rtx find_dead_or_set_registers (rtx, struct resources*,
                                       rtx*, int, struct resources,
                                       struct resources);

/* Utility function called from mark_target_live_regs via note_stores.
   It deadens any CLOBBERed registers and livens any SET registers.  */

static void
update_live_status (rtx dest, rtx x, void *data ATTRIBUTE_UNUSED)
{
  int first_regno, last_regno;
  int i;

  if (!REG_P (dest)
      && (GET_CODE (dest) != SUBREG || !REG_P (SUBREG_REG (dest))))
    return;

  if (GET_CODE (dest) == SUBREG)
    first_regno = subreg_regno (dest);
  else
    first_regno = REGNO (dest);

  last_regno = first_regno + hard_regno_nregs[first_regno][GET_MODE (dest)];

  if (GET_CODE (x) == CLOBBER)
    for (i = first_regno; i < last_regno; i++)
      CLEAR_HARD_REG_BIT (current_live_regs, i);
  else
    for (i = first_regno; i < last_regno; i++)
      {
        SET_HARD_REG_BIT (current_live_regs, i);
        CLEAR_HARD_REG_BIT (pending_dead_regs, i);
      }
}

/* Find the number of the basic block with correct live register
   information that starts closest to INSN.  Return -1 if we couldn't
   find such a basic block or the beginning is more than
   SEARCH_LIMIT instructions before INSN.  Use SEARCH_LIMIT = -1 for
   an unlimited search.

   The delay slot filling code destroys the control-flow graph so,
   instead of finding the basic block containing INSN, we search
   backwards toward a BARRIER where the live register information is
   correct.  */

static int
find_basic_block (rtx insn, int search_limit)
{
  basic_block bb;

  /* Scan backwards to the previous BARRIER.  Then see if we can find a
     label that starts a basic block.  Return the basic block number.  */
  for (insn = prev_nonnote_insn (insn);
       insn && !BARRIER_P (insn) && search_limit != 0;
       insn = prev_nonnote_insn (insn), --search_limit)
    ;

  /* The closest BARRIER is too far away.  */
  if (search_limit == 0)
    return -1;

  /* The start of the function.  */
  else if (insn == 0)
    return ENTRY_BLOCK_PTR->next_bb->index;

  /* See if any of the upcoming CODE_LABELs start a basic block.  If we reach
     anything other than a CODE_LABEL or note, we can't find this code.  */
  for (insn = next_nonnote_insn (insn);
       insn && LABEL_P (insn);
       insn = next_nonnote_insn (insn))
    {
      FOR_EACH_BB (bb)
        if (insn == BB_HEAD (bb))
          return bb->index;
    }

  return -1;
}

/* Similar to next_insn, but ignores insns in the delay slots of
   an annulled branch.  */

static rtx
next_insn_no_annul (rtx insn)
{
  if (insn)
    {
      /* If INSN is an annulled branch, skip any insns from the target
         of the branch.  */
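      /* An insn is inside a delay-slot SEQUENCE exactly when it is not
         linked into the main insn chain, i.e. when
         NEXT_INSN (PREV_INSN (insn)) != insn; that is what the test
         below checks.  */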
      if (INSN_P (insn)
          && INSN_ANNULLED_BRANCH_P (insn)
          && NEXT_INSN (PREV_INSN (insn)) != insn)
        {
          rtx next = NEXT_INSN (insn);
          enum rtx_code code = GET_CODE (next);

          while ((code == INSN || code == JUMP_INSN || code == CALL_INSN)
                 && INSN_FROM_TARGET_P (next))
            {
              insn = next;
              next = NEXT_INSN (insn);
              code = GET_CODE (next);
            }
        }

      insn = NEXT_INSN (insn);
      if (insn && NONJUMP_INSN_P (insn)
          && GET_CODE (PATTERN (insn)) == SEQUENCE)
        insn = XVECEXP (PATTERN (insn), 0, 0);
    }

  return insn;
}

/* Given X, some rtl, and RES, a pointer to a `struct resource', mark
   which resources are referenced by the insn.  If INCLUDE_DELAYED_EFFECTS
   is TRUE, resources used by the called routine will be included for
   CALL_INSNs.  */

void
mark_referenced_resources (rtx x, struct resources *res,
                           int include_delayed_effects)
{
  enum rtx_code code = GET_CODE (x);
  int i, j;
  unsigned int r;
  const char *format_ptr;

  /* Handle leaf items for which we set resource flags.  Also, special-case
     CALL, SET and CLOBBER operators.  */
  switch (code)
    {
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case PC:
    case SYMBOL_REF:
    case LABEL_REF:
      return;

    case SUBREG:
      if (!REG_P (SUBREG_REG (x)))
        mark_referenced_resources (SUBREG_REG (x), res, 0);
      else
        {
          unsigned int regno = subreg_regno (x);
          unsigned int last_regno
            = regno + hard_regno_nregs[regno][GET_MODE (x)];

          gcc_assert (last_regno <= FIRST_PSEUDO_REGISTER);
          for (r = regno; r < last_regno; r++)
            SET_HARD_REG_BIT (res->regs, r);
        }
      return;

    case REG:
        {
          unsigned int regno = REGNO (x);
          unsigned int last_regno
            = regno + hard_regno_nregs[regno][GET_MODE (x)];

          gcc_assert (last_regno <= FIRST_PSEUDO_REGISTER);
          for (r = regno; r < last_regno; r++)
            SET_HARD_REG_BIT (res->regs, r);
        }
      return;

    case MEM:
      /* If this memory shouldn't change, it really isn't referencing
         memory.  */
      if (MEM_READONLY_P (x))
        res->unch_memory = 1;
      else
        res->memory = 1;
      res->volatil |= MEM_VOLATILE_P (x);

      /* Mark registers used to access memory.  */
      mark_referenced_resources (XEXP (x, 0), res, 0);
      return;

    case CC0:
      res->cc = 1;
      return;

    case UNSPEC_VOLATILE:
    case ASM_INPUT:
      /* Traditional asm's are always volatile.  */
      res->volatil = 1;
      return;

    case TRAP_IF:
      res->volatil = 1;
      break;

    case ASM_OPERANDS:
      res->volatil |= MEM_VOLATILE_P (x);

      /* For all ASM_OPERANDS, we must traverse the vector of input operands.
         We cannot just fall through here, since then we would be confused
         by the ASM_INPUT rtx inside ASM_OPERANDS, which does not indicate
         a traditional asm, unlike its normal usage.  */

      for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
        mark_referenced_resources (ASM_OPERANDS_INPUT (x, i), res, 0);
      return;

    case CALL:
      /* The first operand will be a (MEM (xxx)) but doesn't really reference
         memory.  The second operand may be referenced, though.  */
      mark_referenced_resources (XEXP (XEXP (x, 0), 0), res, 0);
      mark_referenced_resources (XEXP (x, 1), res, 0);
      return;

    case SET:
      /* Usually, the first operand of SET is set, not referenced.  But
         registers used to access memory are referenced.  SET_DEST is
         also referenced if it is a ZERO_EXTRACT.  */

      mark_referenced_resources (SET_SRC (x), res, 0);

      x = SET_DEST (x);
      if (GET_CODE (x) == ZERO_EXTRACT
          || GET_CODE (x) == STRICT_LOW_PART)
        mark_referenced_resources (x, res, 0);
      else if (GET_CODE (x) == SUBREG)
        x = SUBREG_REG (x);
      if (MEM_P (x))
        mark_referenced_resources (XEXP (x, 0), res, 0);
      return;

    case CLOBBER:
      return;

    case CALL_INSN:
      if (include_delayed_effects)
        {
          /* A CALL references memory, the frame pointer if it exists, the
             stack pointer, any global registers and any registers given in
             USE insns immediately in front of the CALL.

             However, we may have moved some of the parameter loading insns
             into the delay slot of this CALL.  If so, the USE's for them
             don't count and should be skipped.  */
          rtx insn = PREV_INSN (x);
          rtx sequence = 0;
          int seq_size = 0;
          int i;

          /* If we are part of a delay slot sequence, point at the SEQUENCE.  */
          if (NEXT_INSN (insn) != x)
            {
              sequence = PATTERN (NEXT_INSN (insn));
              seq_size = XVECLEN (sequence, 0);
              gcc_assert (GET_CODE (sequence) == SEQUENCE);
            }

          res->memory = 1;
          SET_HARD_REG_BIT (res->regs, STACK_POINTER_REGNUM);
          if (frame_pointer_needed)
            {
              SET_HARD_REG_BIT (res->regs, FRAME_POINTER_REGNUM);
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
              SET_HARD_REG_BIT (res->regs, HARD_FRAME_POINTER_REGNUM);
#endif
            }

          for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
            if (global_regs[i])
              SET_HARD_REG_BIT (res->regs, i);

          /* Check for a REG_SETJMP.  If it exists, then we must
             assume that this call can need any register.

             This is done to be more conservative about how we handle setjmp.
             We assume that they both use and set all registers.  Using all
             registers ensures that a register will not be considered dead
             just because it crosses a setjmp call.  A register should be
             considered dead only if the setjmp call returns nonzero.  */
          if (find_reg_note (x, REG_SETJMP, NULL))
            SET_HARD_REG_SET (res->regs);

          {
            rtx link;

            for (link = CALL_INSN_FUNCTION_USAGE (x);
                 link;
                 link = XEXP (link, 1))
              if (GET_CODE (XEXP (link, 0)) == USE)
                {
                  for (i = 1; i < seq_size; i++)
                    {
                      rtx slot_pat = PATTERN (XVECEXP (sequence, 0, i));
                      if (GET_CODE (slot_pat) == SET
                          && rtx_equal_p (SET_DEST (slot_pat),
                                          XEXP (XEXP (link, 0), 0)))
                        break;
                    }
                  if (i >= seq_size)
                    mark_referenced_resources (XEXP (XEXP (link, 0), 0),
                                               res, 0);
                }
          }
        }

      /* ... fall through to other INSN processing ...  */

    case INSN:
    case JUMP_INSN:

#ifdef INSN_REFERENCES_ARE_DELAYED
      if (! include_delayed_effects
          && INSN_REFERENCES_ARE_DELAYED (x))
        return;
#endif

      /* No special processing, just speed up.  */
      mark_referenced_resources (PATTERN (x), res, include_delayed_effects);
      return;

    default:
      break;
    }

  /* Process each sub-expression and flag what it needs.  */
  format_ptr = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
        mark_referenced_resources (XEXP (x, i), res, include_delayed_effects);
        break;

      case 'E':
        for (j = 0; j < XVECLEN (x, i); j++)
          mark_referenced_resources (XVECEXP (x, i, j), res,
                                     include_delayed_effects);
        break;
      }
}

/* A subroutine of mark_target_live_regs.  Search forward from TARGET
   looking for registers that are set before they are used.  These are dead.
   Stop after passing a few conditional jumps, and/or a small
   number of unconditional branches.  */

static rtx
find_dead_or_set_registers (rtx target, struct resources *res,
                            rtx *jump_target, int jump_count,
                            struct resources set, struct resources needed)
{
  HARD_REG_SET scratch;
  rtx insn, next;
  rtx jump_insn = 0;
  int i;

  for (insn = target; insn; insn = next)
    {
      rtx this_jump_insn = insn;

      next = NEXT_INSN (insn);

      /* If this instruction can throw an exception, then we don't
         know where we might end up next.  That means that we have to
         assume that whatever we have already marked as live really is
         live.  */
      if (can_throw_internal (insn))
        break;

      switch (GET_CODE (insn))
        {
        case CODE_LABEL:
          /* After a label, any pending dead registers that weren't yet
             used can be made dead.  */
          AND_COMPL_HARD_REG_SET (pending_dead_regs, needed.regs);
          AND_COMPL_HARD_REG_SET (res->regs, pending_dead_regs);
          CLEAR_HARD_REG_SET (pending_dead_regs);

          continue;

        case BARRIER:
        case NOTE:
          continue;

        case INSN:
          if (GET_CODE (PATTERN (insn)) == USE)
            {
              /* If INSN is a USE made by update_block, we care about the
                 underlying insn.  Any registers set by the underlying insn
                 are live since the insn is being done somewhere else.  */
              if (INSN_P (XEXP (PATTERN (insn), 0)))
                mark_set_resources (XEXP (PATTERN (insn), 0), res, 0,
                                    MARK_SRC_DEST_CALL);

              /* All other USE insns are to be ignored.  */
              continue;
            }
          else if (GET_CODE (PATTERN (insn)) == CLOBBER)
            continue;
          else if (GET_CODE (PATTERN (insn)) == SEQUENCE)
            {
              /* An unconditional jump can be used to fill the delay slot
                 of a call, so search for a JUMP_INSN in any position.  */
              for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
                {
                  this_jump_insn = XVECEXP (PATTERN (insn), 0, i);
                  if (JUMP_P (this_jump_insn))
                    break;
                }
            }

        default:
          break;
        }

      if (JUMP_P (this_jump_insn))
        {
          if (jump_count++ < 10)
            {
              if (any_uncondjump_p (this_jump_insn)
                  || GET_CODE (PATTERN (this_jump_insn)) == RETURN)
                {
                  next = JUMP_LABEL (this_jump_insn);
                  if (jump_insn == 0)
                    {
                      jump_insn = insn;
                      if (jump_target)
                        *jump_target = JUMP_LABEL (this_jump_insn);
                    }
                }
              else if (any_condjump_p (this_jump_insn))
                {
                  struct resources target_set, target_res;
                  struct resources fallthrough_res;

                  /* We can handle conditional branches here by following
                     both paths, and then IOR the results of the two paths
                     together, which will give us registers that are dead
                     on both paths.  Since this is expensive, we give it
                     a much higher cost than unconditional branches.  The
                     cost was chosen so that we will follow at most 1
                     conditional branch.  */

                  jump_count += 4;
                  if (jump_count >= 10)
                    break;

                  mark_referenced_resources (insn, &needed, 1);

                  /* For an annulled branch, mark_set_resources ignores slots
                     filled by instructions from the target.  This is correct
                     if the branch is not taken.  Since we are following both
                     paths from the branch, we must also compute correct info
                     if the branch is taken.  We do this by inverting all of
                     the INSN_FROM_TARGET_P bits, calling mark_set_resources,
                     and then inverting the INSN_FROM_TARGET_P bits again.  */

                  if (GET_CODE (PATTERN (insn)) == SEQUENCE
                      && INSN_ANNULLED_BRANCH_P (this_jump_insn))
                    {
                      for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
                        INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i))
                          = ! INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i));

                      target_set = set;
                      mark_set_resources (insn, &target_set, 0,
                                          MARK_SRC_DEST_CALL);

                      for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
                        INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i))
                          = ! INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i));

                      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
                    }
                  else
                    {
                      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
                      target_set = set;
                    }

                  target_res = *res;
                  COPY_HARD_REG_SET (scratch, target_set.regs);
                  AND_COMPL_HARD_REG_SET (scratch, needed.regs);
                  AND_COMPL_HARD_REG_SET (target_res.regs, scratch);

                  fallthrough_res = *res;
                  COPY_HARD_REG_SET (scratch, set.regs);
                  AND_COMPL_HARD_REG_SET (scratch, needed.regs);
                  AND_COMPL_HARD_REG_SET (fallthrough_res.regs, scratch);

                  find_dead_or_set_registers (JUMP_LABEL (this_jump_insn),
                                              &target_res, 0, jump_count,
                                              target_set, needed);
                  find_dead_or_set_registers (next,
                                              &fallthrough_res, 0, jump_count,
                                              set, needed);
                  IOR_HARD_REG_SET (fallthrough_res.regs, target_res.regs);
                  AND_HARD_REG_SET (res->regs, fallthrough_res.regs);
                  break;
                }
              else
                break;
            }
          else
            {
              /* Don't try this optimization if we expired our jump count
                 above, since that would mean there may be an infinite loop
                 in the function being compiled.  */
              jump_insn = 0;
              break;
            }
        }

      mark_referenced_resources (insn, &needed, 1);
      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);

      COPY_HARD_REG_SET (scratch, set.regs);
      AND_COMPL_HARD_REG_SET (scratch, needed.regs);
      AND_COMPL_HARD_REG_SET (res->regs, scratch);
    }

  return jump_insn;
}

/* Given X, a part of an insn, and a pointer to a `struct resource',
   RES, indicate which resources are modified by the insn.  If
   MARK_TYPE is MARK_SRC_DEST_CALL, also mark resources potentially
   set by the called routine.

   If IN_DEST is nonzero, it means we are inside a SET.  Otherwise,
   objects are being referenced instead of set.

   We never mark the insn as modifying the condition code unless it explicitly
   SETs CC0 even though this is not totally correct.  The reason for this is
   that we require a SET of CC0 to immediately precede the reference to CC0.
   So if some other insn sets CC0 as a side-effect, we know it cannot affect
   our computation and thus may be placed in a delay slot.  */

void
mark_set_resources (rtx x, struct resources *res, int in_dest,
                    enum mark_resource_type mark_type)
{
  enum rtx_code code;
  int i, j;
  unsigned int r;
  const char *format_ptr;

 restart:

  code = GET_CODE (x);

  switch (code)
    {
    case NOTE:
    case BARRIER:
    case CODE_LABEL:
    case USE:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST:
    case PC:
      /* These don't set any resources.  */
      return;

    case CC0:
      if (in_dest)
        res->cc = 1;
      return;

    case CALL_INSN:
      /* Called routine modifies the condition code, memory, any registers
         that aren't saved across calls, global registers and anything
         explicitly CLOBBERed immediately after the CALL_INSN.  */

      if (mark_type == MARK_SRC_DEST_CALL)
        {
          rtx link;

          res->cc = res->memory = 1;
          for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
            if (call_used_regs[r] || global_regs[r])
              SET_HARD_REG_BIT (res->regs, r);

          for (link = CALL_INSN_FUNCTION_USAGE (x);
               link; link = XEXP (link, 1))
            if (GET_CODE (XEXP (link, 0)) == CLOBBER)
              mark_set_resources (SET_DEST (XEXP (link, 0)), res, 1,
                                  MARK_SRC_DEST);

          /* Check for a REG_SETJMP.  If it exists, then we must
             assume that this call can clobber any register.  */
          if (find_reg_note (x, REG_SETJMP, NULL))
            SET_HARD_REG_SET (res->regs);
        }

      /* ... and also what its RTL says it modifies, if anything.  */

    case JUMP_INSN:
    case INSN:

        /* An insn consisting of just a CLOBBER (or USE) is just for flow
           and doesn't actually do anything, so we ignore it.  */

#ifdef INSN_SETS_ARE_DELAYED
      if (mark_type != MARK_SRC_DEST_CALL
          && INSN_SETS_ARE_DELAYED (x))
        return;
#endif

      x = PATTERN (x);
      if (GET_CODE (x) != USE && GET_CODE (x) != CLOBBER)
        goto restart;
      return;

    case SET:
      /* If the source of a SET is a CALL, this is actually done by
         the called routine.  So only include it if we are to include the
         effects of the calling routine.  */

      mark_set_resources (SET_DEST (x), res,
                          (mark_type == MARK_SRC_DEST_CALL
                           || GET_CODE (SET_SRC (x)) != CALL),
                          mark_type);

      mark_set_resources (SET_SRC (x), res, 0, MARK_SRC_DEST);
      return;

    case CLOBBER:
      mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
      return;

    case SEQUENCE:
      for (i = 0; i < XVECLEN (x, 0); i++)
        if (! (INSN_ANNULLED_BRANCH_P (XVECEXP (x, 0, 0))
               && INSN_FROM_TARGET_P (XVECEXP (x, 0, i))))
          mark_set_resources (XVECEXP (x, 0, i), res, 0, mark_type);
      return;

    case POST_INC:
    case PRE_INC:
    case POST_DEC:
    case PRE_DEC:
      mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
      return;

    case PRE_MODIFY:
    case POST_MODIFY:
      mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
      mark_set_resources (XEXP (XEXP (x, 1), 0), res, 0, MARK_SRC_DEST);
      mark_set_resources (XEXP (XEXP (x, 1), 1), res, 0, MARK_SRC_DEST);
      return;

    case SIGN_EXTRACT:
    case ZERO_EXTRACT:
      mark_set_resources (XEXP (x, 0), res, in_dest, MARK_SRC_DEST);
      mark_set_resources (XEXP (x, 1), res, 0, MARK_SRC_DEST);
      mark_set_resources (XEXP (x, 2), res, 0, MARK_SRC_DEST);
      return;

    case MEM:
      if (in_dest)
        {
          res->memory = 1;
          res->unch_memory |= MEM_READONLY_P (x);
          res->volatil |= MEM_VOLATILE_P (x);
        }

      mark_set_resources (XEXP (x, 0), res, 0, MARK_SRC_DEST);
      return;

    case SUBREG:
      if (in_dest)
        {
          if (!REG_P (SUBREG_REG (x)))
            mark_set_resources (SUBREG_REG (x), res, in_dest, mark_type);
          else
            {
              unsigned int regno = subreg_regno (x);
              unsigned int last_regno
                = regno + hard_regno_nregs[regno][GET_MODE (x)];

              gcc_assert (last_regno <= FIRST_PSEUDO_REGISTER);
              for (r = regno; r < last_regno; r++)
                SET_HARD_REG_BIT (res->regs, r);
            }
        }
      return;

    case REG:
      if (in_dest)
        {
          unsigned int regno = REGNO (x);
          unsigned int last_regno
            = regno + hard_regno_nregs[regno][GET_MODE (x)];

          gcc_assert (last_regno <= FIRST_PSEUDO_REGISTER);
          for (r = regno; r < last_regno; r++)
            SET_HARD_REG_BIT (res->regs, r);
        }
      return;

    case UNSPEC_VOLATILE:
    case ASM_INPUT:
      /* Traditional asm's are always volatile.  */
      res->volatil = 1;
      return;

    case TRAP_IF:
      res->volatil = 1;
      break;

    case ASM_OPERANDS:
      res->volatil |= MEM_VOLATILE_P (x);

      /* For all ASM_OPERANDS, we must traverse the vector of input operands.
         We cannot just fall through here, since then we would be confused
         by the ASM_INPUT rtx inside ASM_OPERANDS, which does not indicate
         a traditional asm, unlike its normal usage.  */

      for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
        mark_set_resources (ASM_OPERANDS_INPUT (x, i), res, in_dest,
                            MARK_SRC_DEST);
      return;

    default:
      break;
    }

  /* Process each sub-expression and flag what it needs.  */
  format_ptr = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
        mark_set_resources (XEXP (x, i), res, in_dest, mark_type);
        break;

      case 'E':
        for (j = 0; j < XVECLEN (x, i); j++)
          mark_set_resources (XVECEXP (x, i, j), res, in_dest, mark_type);
        break;
      }
}

/* Return TRUE if INSN is a return, possibly with a filled delay slot.  */

static bool
return_insn_p (rtx insn)
{
  if (JUMP_P (insn) && GET_CODE (PATTERN (insn)) == RETURN)
    return true;

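  /* A return with a filled delay slot is represented as a SEQUENCE whose
     first element is the RETURN jump itself, so look at that element.  */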
  if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    return return_insn_p (XVECEXP (PATTERN (insn), 0, 0));

  return false;
}

/* Set the resources that are live at TARGET.

   If TARGET is zero, we refer to the end of the current function and can
   return our precomputed value.

   Otherwise, we try to find out what is live by consulting the basic block
   information.  This is tricky, because we must consider the actions of
   reload and jump optimization, which occur after the basic block information
   has been computed.

   Accordingly, we proceed as follows:

   We find the previous BARRIER and look at all immediately following labels
   (with no intervening active insns) to see if any of them start a basic
   block.  If we hit the start of the function first, we use block 0.

   Once we have found a basic block and a corresponding first insn, we can
   accurately compute the live status from basic_block_live_regs and
   reg_renumber.  (By starting at a label following a BARRIER, we are immune
   to actions taken by reload and jump.)  Then we scan all insns between
   that point and our target.  For each CLOBBER (or for call-clobbered regs
   when we pass a CALL_INSN), mark the appropriate registers as dead.  For
   a SET, mark them as live.

   We have to be careful when using REG_DEAD notes because they are not
   updated by such things as find_equiv_reg.  So keep track of registers
   marked as dead that haven't been assigned to, and mark them dead at the
   next CODE_LABEL since reload and jump won't propagate values across labels.

   If we cannot find the start of a basic block (should be a very rare
   case, if it can happen at all), mark everything as potentially live.

   Next, scan forward from TARGET looking for things set or clobbered
   before they are used.  These are not live.

   Because we can be called many times on the same target, save our results
   in a hash table indexed by INSN_UID.  This is only done if the function
   init_resource_info () was invoked before we are called.  */

void
mark_target_live_regs (rtx insns, rtx target, struct resources *res)
{
  int b = -1;
  unsigned int i;
  struct target_info *tinfo = NULL;
  rtx insn;
  rtx jump_insn = 0;
  rtx jump_target;
  HARD_REG_SET scratch;
  struct resources set, needed;

  /* Handle end of function.  */
  if (target == 0)
    {
      *res = end_of_function_needs;
      return;
    }

  /* Handle return insn.  */
  else if (return_insn_p (target))
    {
      *res = end_of_function_needs;
      mark_referenced_resources (target, res, 0);
      return;
    }

  /* We have to assume memory is needed, but the CC isn't.  */
  res->memory = 1;
  res->volatil = res->unch_memory = 0;
  res->cc = 0;

  /* See if we have computed this value already.  */
  if (target_hash_table != NULL)
    {
      for (tinfo = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
           tinfo; tinfo = tinfo->next)
        if (tinfo->uid == INSN_UID (target))
          break;

      /* Start by getting the basic block number.  If we have saved
         information, we can get it from there unless the insn at the
         start of the basic block has been deleted.  */
      if (tinfo && tinfo->block != -1
          && ! INSN_DELETED_P (BB_HEAD (BASIC_BLOCK (tinfo->block))))
        b = tinfo->block;
    }

  if (b == -1)
    b = find_basic_block (target, MAX_DELAY_SLOT_LIVE_SEARCH);

  if (target_hash_table != NULL)
    {
      if (tinfo)
        {
          /* If the information is up-to-date, use it.  Otherwise, we will
             update it below.  */
          if (b == tinfo->block && b != -1 && tinfo->bb_tick == bb_ticks[b])
            {
              COPY_HARD_REG_SET (res->regs, tinfo->live_regs);
              return;
            }
        }
      else
        {
          /* Allocate a place to put our results and chain it into the
             hash table.  */
          tinfo = XNEW (struct target_info);
          tinfo->uid = INSN_UID (target);
          tinfo->block = b;
          tinfo->next
            = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
          target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME] = tinfo;
        }
    }

  CLEAR_HARD_REG_SET (pending_dead_regs);

  /* If we found a basic block, get the live registers from it and update
     them with anything set or killed between its start and the insn before
     TARGET.  Otherwise, we must assume everything is live.  */
  if (b != -1)
    {
      regset regs_live = BASIC_BLOCK (b)->il.rtl->global_live_at_start;
      unsigned int j;
      unsigned int regno;
      rtx start_insn, stop_insn;
      reg_set_iterator rsi;

      /* Compute hard regs live at start of block -- this is the real hard regs
         marked live, plus live pseudo regs that have been renumbered to
         hard regs.  */

      REG_SET_TO_HARD_REG_SET (current_live_regs, regs_live);

      EXECUTE_IF_SET_IN_REG_SET (regs_live, FIRST_PSEUDO_REGISTER, i, rsi)
        {
          if (reg_renumber[i] >= 0)
            {
              regno = reg_renumber[i];
              for (j = regno;
                   j < regno + hard_regno_nregs[regno][PSEUDO_REGNO_MODE (i)];
                   j++)
                SET_HARD_REG_BIT (current_live_regs, j);
            }
        }

      /* Get starting and ending insn, handling the case where each might
         be a SEQUENCE.  */
      start_insn = (b == 0 ? insns : BB_HEAD (BASIC_BLOCK (b)));
      stop_insn = target;

      if (NONJUMP_INSN_P (start_insn)
          && GET_CODE (PATTERN (start_insn)) == SEQUENCE)
        start_insn = XVECEXP (PATTERN (start_insn), 0, 0);

      if (NONJUMP_INSN_P (stop_insn)
          && GET_CODE (PATTERN (stop_insn)) == SEQUENCE)
        stop_insn = next_insn (PREV_INSN (stop_insn));

      for (insn = start_insn; insn != stop_insn;
           insn = next_insn_no_annul (insn))
        {
          rtx link;
          rtx real_insn = insn;
          enum rtx_code code = GET_CODE (insn);

          /* If this insn is from the target of a branch, it isn't going to
             be used in the sequel.  If it is used in both cases, this
             test will not be true.  */
          if ((code == INSN || code == JUMP_INSN || code == CALL_INSN)
              && INSN_FROM_TARGET_P (insn))
            continue;

          /* If this insn is a USE made by update_block, we care about the
             underlying insn.  */
          if (code == INSN && GET_CODE (PATTERN (insn)) == USE
              && INSN_P (XEXP (PATTERN (insn), 0)))
              real_insn = XEXP (PATTERN (insn), 0);

          if (CALL_P (real_insn))
            {
              /* CALL clobbers all call-used regs that aren't fixed except
                 sp, ap, and fp.  Do this before setting the result of the
                 call live.  */
              AND_COMPL_HARD_REG_SET (current_live_regs,
                                      regs_invalidated_by_call);

              /* A CALL_INSN sets any global register live, since it may
                 have been modified by the call.  */
              for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
                if (global_regs[i])
                  SET_HARD_REG_BIT (current_live_regs, i);
            }

          /* Mark anything killed in an insn to be deadened at the next
             label.  Ignore USE insns; the only REG_DEAD notes will be for
             parameters.  But they might be early.  A CALL_INSN will usually
             clobber registers used for parameters.  It isn't worth bothering
             with the unlikely case when it won't.  */
          if ((NONJUMP_INSN_P (real_insn)
               && GET_CODE (PATTERN (real_insn)) != USE
               && GET_CODE (PATTERN (real_insn)) != CLOBBER)
              || JUMP_P (real_insn)
              || CALL_P (real_insn))
            {
              for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
                if (REG_NOTE_KIND (link) == REG_DEAD
                    && REG_P (XEXP (link, 0))
                    && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
                  {
                    unsigned int first_regno = REGNO (XEXP (link, 0));
                    unsigned int last_regno
                      = (first_regno
                         + hard_regno_nregs[first_regno]
                                           [GET_MODE (XEXP (link, 0))]);

                    for (i = first_regno; i < last_regno; i++)
                      SET_HARD_REG_BIT (pending_dead_regs, i);
                  }

              note_stores (PATTERN (real_insn), update_live_status, NULL);

              /* If any registers were unused after this insn, kill them.
                 These notes will always be accurate.  */
              for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
                if (REG_NOTE_KIND (link) == REG_UNUSED
                    && REG_P (XEXP (link, 0))
                    && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
                  {
                    unsigned int first_regno = REGNO (XEXP (link, 0));
                    unsigned int last_regno
                      = (first_regno
                         + hard_regno_nregs[first_regno]
                                           [GET_MODE (XEXP (link, 0))]);

                    for (i = first_regno; i < last_regno; i++)
                      CLEAR_HARD_REG_BIT (current_live_regs, i);
                  }
            }

          else if (LABEL_P (real_insn))
            {
              /* A label clobbers the pending dead registers since neither
                 reload nor jump will propagate a value across a label.  */
              AND_COMPL_HARD_REG_SET (current_live_regs, pending_dead_regs);
              CLEAR_HARD_REG_SET (pending_dead_regs);
            }

          /* The beginning of the epilogue corresponds to the end of the
             RTL chain when there are no epilogue insns.  Certain resources
             are implicitly required at that point.  */
          else if (NOTE_P (real_insn)
                   && NOTE_LINE_NUMBER (real_insn) == NOTE_INSN_EPILOGUE_BEG)
            IOR_HARD_REG_SET (current_live_regs, start_of_epilogue_needs.regs);
        }

      COPY_HARD_REG_SET (res->regs, current_live_regs);
      if (tinfo != NULL)
        {
          tinfo->block = b;
          tinfo->bb_tick = bb_ticks[b];
        }
    }
  else
    /* We didn't find the start of a basic block.  Assume everything
       in use.  This should happen only extremely rarely.  */
    SET_HARD_REG_SET (res->regs);

  CLEAR_RESOURCE (&set);
  CLEAR_RESOURCE (&needed);

  jump_insn = find_dead_or_set_registers (target, res, &jump_target, 0,
                                          set, needed);

  /* If we hit an unconditional branch, we have another way of finding out
     what is live: we can see what is live at the branch target and include
     anything used but not set before the branch.  We add the live
     resources found using the test below to those found until now.  */

  if (jump_insn)
    {
      struct resources new_resources;
      rtx stop_insn = next_active_insn (jump_insn);

      mark_target_live_regs (insns, next_active_insn (jump_target),
                             &new_resources);
      CLEAR_RESOURCE (&set);
      CLEAR_RESOURCE (&needed);

      /* Include JUMP_INSN in the needed registers.  */
      for (insn = target; insn != stop_insn; insn = next_active_insn (insn))
        {
          mark_referenced_resources (insn, &needed, 1);

          COPY_HARD_REG_SET (scratch, needed.regs);
          AND_COMPL_HARD_REG_SET (scratch, set.regs);
          IOR_HARD_REG_SET (new_resources.regs, scratch);

          mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
        }

      IOR_HARD_REG_SET (res->regs, new_resources.regs);
    }

  if (tinfo != NULL)
    {
      COPY_HARD_REG_SET (tinfo->live_regs, res->regs);
    }
}

/* Initialize the resources required by mark_target_live_regs ().
   This should be invoked before the first call to mark_target_live_regs.  */

void
init_resource_info (rtx epilogue_insn)
{
  int i;

  /* Indicate what resources are required to be valid at the end of the current
     function.  The condition code never is and memory always is.  If the
     frame pointer is needed, it is and so is the stack pointer unless
     EXIT_IGNORE_STACK is nonzero.  If the frame pointer is not needed, the
     stack pointer is.  Registers used to return the function value are
     needed.  Registers holding global variables are needed.  */

  end_of_function_needs.cc = 0;
  end_of_function_needs.memory = 1;
  end_of_function_needs.unch_memory = 0;
  CLEAR_HARD_REG_SET (end_of_function_needs.regs);

  if (frame_pointer_needed)
    {
      SET_HARD_REG_BIT (end_of_function_needs.regs, FRAME_POINTER_REGNUM);
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
      SET_HARD_REG_BIT (end_of_function_needs.regs, HARD_FRAME_POINTER_REGNUM);
#endif
      if (! EXIT_IGNORE_STACK
          || current_function_sp_is_unchanging)
        SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);
    }
  else
    SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);

  if (current_function_return_rtx != 0)
    mark_referenced_resources (current_function_return_rtx,
                               &end_of_function_needs, 1);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (global_regs[i]
#ifdef EPILOGUE_USES
        || EPILOGUE_USES (i)
#endif
        )
      SET_HARD_REG_BIT (end_of_function_needs.regs, i);

  /* The registers required to be live at the end of the function are
     represented in the flow information as being dead just prior to
     reaching the end of the function.  For example, the return of a value
     might be represented by a USE of the return register immediately
     followed by an unconditional jump to the return label where the
     return label is the end of the RTL chain.  The end of the RTL chain
     is then taken to mean that the return register is live.

     This sequence is no longer maintained when epilogue instructions are
     added to the RTL chain.  To reconstruct the original meaning, the
     start of the epilogue (NOTE_INSN_EPILOGUE_BEG) is regarded as the
     point where these registers become live (start_of_epilogue_needs).
     If epilogue instructions are present, the registers set by those
     instructions won't have been processed by flow.  Thus, those
     registers are additionally required at the end of the RTL chain
     (end_of_function_needs).  */

  start_of_epilogue_needs = end_of_function_needs;

  while ((epilogue_insn = next_nonnote_insn (epilogue_insn)))
    {
      mark_set_resources (epilogue_insn, &end_of_function_needs, 0,
                          MARK_SRC_DEST_CALL);
      if (return_insn_p (epilogue_insn))
        break;
    }

  /* Allocate and initialize the tables used by mark_target_live_regs.  */
  target_hash_table = XCNEWVEC (struct target_info *, TARGET_HASH_PRIME);
  bb_ticks = XCNEWVEC (int, last_basic_block);
}

/* Free up the resources allocated to mark_target_live_regs ().  This
   should be invoked after the last call to mark_target_live_regs ().  */

void
free_resource_info (void)
{
  if (target_hash_table != NULL)
    {
      int i;

      for (i = 0; i < TARGET_HASH_PRIME; ++i)
        {
          struct target_info *ti = target_hash_table[i];

          while (ti)
            {
              struct target_info *next = ti->next;
              free (ti);
              ti = next;
            }
        }

      free (target_hash_table);
      target_hash_table = NULL;
    }

  if (bb_ticks != NULL)
    {
      free (bb_ticks);
      bb_ticks = NULL;
    }
}

/* Clear any hashed information that we have stored for INSN.  */

void
clear_hashed_info_for_insn (rtx insn)
{
  struct target_info *tinfo;

  if (target_hash_table != NULL)
    {
      for (tinfo = target_hash_table[INSN_UID (insn) % TARGET_HASH_PRIME];
           tinfo; tinfo = tinfo->next)
        if (tinfo->uid == INSN_UID (insn))
          break;

      if (tinfo)
        tinfo->block = -1;
    }
}

/* Increment the tick count for the basic block that contains INSN.  */

void
incr_ticks_for_insn (rtx insn)
{
  int b = find_basic_block (insn, MAX_DELAY_SLOT_LIVE_SEARCH);

  if (b != -1)
    bb_ticks[b]++;
}

/* Add TRIAL to the set of resources used at the end of the current
   function.  */

void
mark_end_of_function_resources (rtx trial, int include_delayed_effects)
{
  mark_referenced_resources (trial, &end_of_function_needs,
                             include_delayed_effects);
}
