OpenCores
URL https://opencores.org/ocsvn/openrisc/openrisc/trunk

Subversion Repositories openrisc

openrisc/trunk/gnu-old/gcc-4.2.2/gcc/df-scan.c (rev 868)

/* FIXME: We need to go back and add the warning messages about code
   moved across setjmp.  */


/* Scanning of rtl for dataflow analysis.
   Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.
   Originally contributed by Michael P. Hayes
             (m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
   Major rewrite contributed by Danny Berlin (dberlin@dberlin.org)
             and Kenneth Zadeck (zadeck@naturalbridge.com).

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "insn-config.h"
#include "recog.h"
#include "function.h"
#include "regs.h"
#include "output.h"
#include "alloc-pool.h"
#include "flags.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "sbitmap.h"
#include "bitmap.h"
#include "timevar.h"
#include "tree.h"
#include "target.h"
#include "target-def.h"
#include "df.h"

#ifndef HAVE_epilogue
#define HAVE_epilogue 0
#endif
#ifndef HAVE_prologue
#define HAVE_prologue 0
#endif
#ifndef HAVE_sibcall_epilogue
#define HAVE_sibcall_epilogue 0
#endif

#ifndef EPILOGUE_USES
#define EPILOGUE_USES(REGNO)  0
#endif

/* The bitmap_obstack is used to hold some static variables that
   should not be reset after each function is compiled.  */

static bitmap_obstack persistent_obstack;

/* The set of hard registers in eliminables[i].from. */

static HARD_REG_SET elim_reg_set;

/* This is a bitmap copy of regs_invalidated_by_call so that we can
   easily add it into bitmaps, etc. */

bitmap df_invalidated_by_call = NULL;

/* Initialize ur_in and ur_out as if all hard registers were partially
   available.  */

static void df_ref_record (struct dataflow *, rtx, rtx *,
                           basic_block, rtx, enum df_ref_type,
                           enum df_ref_flags, bool record_live);
static void df_def_record_1 (struct dataflow *, rtx, basic_block, rtx,
                             enum df_ref_flags, bool record_live);
static void df_defs_record (struct dataflow *, rtx, basic_block, rtx);
static void df_uses_record (struct dataflow *, rtx *, enum df_ref_type,
                            basic_block, rtx, enum df_ref_flags);

static void df_insn_refs_record (struct dataflow *, basic_block, rtx);
static void df_bb_refs_record (struct dataflow *, basic_block);
static void df_refs_record (struct dataflow *, bitmap);
static struct df_ref *df_ref_create_structure (struct dataflow *, rtx, rtx *,
                                               basic_block, rtx, enum df_ref_type,
                                               enum df_ref_flags);
static void df_record_entry_block_defs (struct dataflow *);
static void df_record_exit_block_uses (struct dataflow *);
static void df_grow_reg_info (struct dataflow *, struct df_ref_info *);
static void df_grow_ref_info (struct df_ref_info *, unsigned int);
static void df_grow_insn_info (struct df *);


/*----------------------------------------------------------------------------
   SCANNING DATAFLOW PROBLEM

   There are several ways in which scanning looks just like the other
   dataflow problems.  It shares all the mechanisms for local info
   as well as basic block info.  Where it differs is when and how often
   it gets run.  It also has no need for the iterative solver.
----------------------------------------------------------------------------*/

/* Problem data for the scanning dataflow function.  */
struct df_scan_problem_data
{
  alloc_pool ref_pool;
  alloc_pool insn_pool;
  alloc_pool reg_pool;
  alloc_pool mw_reg_pool;
  alloc_pool mw_link_pool;
};

typedef struct df_scan_bb_info *df_scan_bb_info_t;

static void
df_scan_free_internal (struct dataflow *dflow)
{
  struct df *df = dflow->df;
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) dflow->problem_data;

  free (df->def_info.regs);
  free (df->def_info.refs);
  memset (&df->def_info, 0, (sizeof (struct df_ref_info)));

  free (df->use_info.regs);
  free (df->use_info.refs);
  memset (&df->use_info, 0, (sizeof (struct df_ref_info)));

  free (df->insns);
  df->insns = NULL;
  df->insns_size = 0;

  free (dflow->block_info);
  dflow->block_info = NULL;
  dflow->block_info_size = 0;

  BITMAP_FREE (df->hardware_regs_used);
  BITMAP_FREE (df->entry_block_defs);
  BITMAP_FREE (df->exit_block_uses);

  free_alloc_pool (dflow->block_pool);
  free_alloc_pool (problem_data->ref_pool);
  free_alloc_pool (problem_data->insn_pool);
  free_alloc_pool (problem_data->reg_pool);
  free_alloc_pool (problem_data->mw_reg_pool);
  free_alloc_pool (problem_data->mw_link_pool);
}


/* Get basic block info.  */

struct df_scan_bb_info *
df_scan_get_bb_info (struct dataflow *dflow, unsigned int index)
{
  gcc_assert (index < dflow->block_info_size);
  return (struct df_scan_bb_info *) dflow->block_info[index];
}


/* Set basic block info.  */

static void
df_scan_set_bb_info (struct dataflow *dflow, unsigned int index,
                     struct df_scan_bb_info *bb_info)
{
  gcc_assert (index < dflow->block_info_size);
  dflow->block_info[index] = (void *) bb_info;
}


/* Free basic block info.  */

static void
df_scan_free_bb_info (struct dataflow *dflow, basic_block bb, void *vbb_info)
{
  struct df_scan_bb_info *bb_info = (struct df_scan_bb_info *) vbb_info;
  if (bb_info)
    {
      df_bb_refs_delete (dflow, bb->index);
      pool_free (dflow->block_pool, bb_info);
    }
}


/* Allocate the problem data for the scanning problem.  This should be
   called when the problem is created or when the entire function is to
   be rescanned.  */

static void
df_scan_alloc (struct dataflow *dflow, bitmap blocks_to_rescan,
               bitmap all_blocks ATTRIBUTE_UNUSED)
{
  struct df *df = dflow->df;
  struct df_scan_problem_data *problem_data;
  unsigned int insn_num = get_max_uid () + 1;
  unsigned int block_size = 50;
  unsigned int bb_index;
  bitmap_iterator bi;

  /* Given the number of pools, this is really faster than tearing
     everything apart.  */
  if (dflow->problem_data)
    df_scan_free_internal (dflow);

  dflow->block_pool
    = create_alloc_pool ("df_scan_block pool",
                         sizeof (struct df_scan_bb_info),
                         block_size);

  problem_data = XNEW (struct df_scan_problem_data);
  dflow->problem_data = problem_data;

  problem_data->ref_pool
    = create_alloc_pool ("df_scan_ref pool",
                         sizeof (struct df_ref), block_size);
  problem_data->insn_pool
    = create_alloc_pool ("df_scan_insn pool",
                         sizeof (struct df_insn_info), block_size);
  problem_data->reg_pool
    = create_alloc_pool ("df_scan_reg pool",
                         sizeof (struct df_reg_info), block_size);
  problem_data->mw_reg_pool
    = create_alloc_pool ("df_scan_mw_reg pool",
                         sizeof (struct df_mw_hardreg), block_size);
  problem_data->mw_link_pool
    = create_alloc_pool ("df_scan_mw_link pool",
                         sizeof (struct df_link), block_size);

  insn_num += insn_num / 4;
  df_grow_reg_info (dflow, &df->def_info);
  df_grow_ref_info (&df->def_info, insn_num);

  df_grow_reg_info (dflow, &df->use_info);
  df_grow_ref_info (&df->use_info, insn_num * 2);

  df_grow_insn_info (df);
  df_grow_bb_info (dflow);

  EXECUTE_IF_SET_IN_BITMAP (blocks_to_rescan, 0, bb_index, bi)
    {
      struct df_scan_bb_info *bb_info = df_scan_get_bb_info (dflow, bb_index);
      if (!bb_info)
        {
          bb_info = (struct df_scan_bb_info *) pool_alloc (dflow->block_pool);
          df_scan_set_bb_info (dflow, bb_index, bb_info);
        }
      bb_info->artificial_defs = NULL;
      bb_info->artificial_uses = NULL;
    }

  df->hardware_regs_used = BITMAP_ALLOC (NULL);
  df->entry_block_defs = BITMAP_ALLOC (NULL);
  df->exit_block_uses = BITMAP_ALLOC (NULL);
}


/* Free all of the data associated with the scan problem.  */

static void
df_scan_free (struct dataflow *dflow)
{
  struct df *df = dflow->df;

  if (dflow->problem_data)
    {
      df_scan_free_internal (dflow);
      free (dflow->problem_data);
    }

  if (df->blocks_to_scan)
    BITMAP_FREE (df->blocks_to_scan);

  if (df->blocks_to_analyze)
    BITMAP_FREE (df->blocks_to_analyze);

  free (dflow);
}
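
/* Dump the global state of the scan problem, for debugging.  */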
static void
df_scan_dump (struct dataflow *dflow ATTRIBUTE_UNUSED, FILE *file ATTRIBUTE_UNUSED)
{
  struct df *df = dflow->df;
  int i;

  fprintf (file, "  invalidated by call \t");
  dump_bitmap (file, df_invalidated_by_call);
  fprintf (file, "  hardware regs used \t");
  dump_bitmap (file, df->hardware_regs_used);
  fprintf (file, "  entry block defs \t");
  dump_bitmap (file, df->entry_block_defs);
  fprintf (file, "  exit block uses \t");
  dump_bitmap (file, df->exit_block_uses);
  fprintf (file, "  regs ever live \t");
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (regs_ever_live[i])
      fprintf (file, "%d ", i);
  fprintf (file, "\n");
}

static struct df_problem problem_SCAN =
{
  DF_SCAN,                    /* Problem id.  */
  DF_NONE,                    /* Direction.  */
  df_scan_alloc,              /* Allocate the problem specific data.  */
  NULL,                       /* Reset global information.  */
  df_scan_free_bb_info,       /* Free basic block info.  */
  NULL,                       /* Local compute function.  */
  NULL,                       /* Init the solution specific data.  */
  NULL,                       /* Iterative solver.  */
  NULL,                       /* Confluence operator 0.  */
  NULL,                       /* Confluence operator n.  */
  NULL,                       /* Transfer function.  */
  NULL,                       /* Finalize function.  */
  df_scan_free,               /* Free all of the problem information.  */
  df_scan_dump,               /* Debugging.  */
  NULL,                       /* Dependent problem.  */
};


/* Create a new DATAFLOW instance and add it to an existing instance
   of DF.  The returned structure is what is used to get at the
   solution.  */

struct dataflow *
df_scan_add_problem (struct df *df, int flags)
{
  return df_add_problem (df, &problem_SCAN, flags);
}
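
/* Usage sketch (illustrative, not part of the original source): a client
   typically creates the scan problem once and then asks for (re)scans as
   the insn stream changes.  Assuming an existing struct df *df:

       struct dataflow *dflow = df_scan_add_problem (df, DF_HARD_REGS);
       df_rescan_blocks (df, NULL);

   DF_HARD_REGS is just an example; the flag bits tested in this file
   (DF_HARD_REGS, DF_SUBREGS, DF_EQUIV_NOTES) can be OR'ed together.  */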

/*----------------------------------------------------------------------------
   Storage Allocation Utilities
----------------------------------------------------------------------------*/


/* First, grow the reg_info information.  If the current size is less
   than the number of pseudos, grow to 25% more than the number of
   pseudos.

   Second, assure that all of the slots up to max_reg_num have been
   filled with reg_info structures.  */

static void
df_grow_reg_info (struct dataflow *dflow, struct df_ref_info *ref_info)
{
  unsigned int max_reg = max_reg_num ();
  unsigned int new_size = max_reg;
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) dflow->problem_data;
  unsigned int i;

  if (ref_info->regs_size < new_size)
    {
      new_size += new_size / 4;
      ref_info->regs = xrealloc (ref_info->regs,
                                 new_size * sizeof (struct df_reg_info *));
      ref_info->regs_size = new_size;
    }

  for (i = ref_info->regs_inited; i < max_reg; i++)
    {
      struct df_reg_info *reg_info = pool_alloc (problem_data->reg_pool);
      memset (reg_info, 0, sizeof (struct df_reg_info));
      ref_info->regs[i] = reg_info;
    }

  ref_info->regs_inited = max_reg;
}
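
/* Growth arithmetic, for reference: with max_reg_num () == 400 and
   regs_size == 100, new_size starts at 400 and the 25% pad raises it to
   500, so a further run of new pseudos is absorbed without another
   xrealloc.  (Numbers are illustrative only.)  */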


/* Grow the ref information.  */

static void
df_grow_ref_info (struct df_ref_info *ref_info, unsigned int new_size)
{
  if (ref_info->refs_size < new_size)
    {
      ref_info->refs = xrealloc (ref_info->refs,
                                 new_size * sizeof (struct df_ref *));
      memset (ref_info->refs + ref_info->refs_size, 0,
              (new_size - ref_info->refs_size) * sizeof (struct df_ref *));
      ref_info->refs_size = new_size;
    }
}


/* Grow the insn info.  If the current size is less than the number of
   instructions, grow to 25% more than the number of instructions.  */

static void
df_grow_insn_info (struct df *df)
{
  unsigned int new_size = get_max_uid () + 1;
  if (df->insns_size < new_size)
    {
      new_size += new_size / 4;
      df->insns = xrealloc (df->insns,
                            new_size * sizeof (struct df_insn_info *));
      memset (df->insns + df->insns_size, 0,
              (new_size - df->insns_size) * sizeof (struct df_insn_info *));
      df->insns_size = new_size;
    }
}


/*----------------------------------------------------------------------------
   PUBLIC INTERFACES FOR SMALL GRAIN CHANGES TO SCANNING.
----------------------------------------------------------------------------*/

/* Rescan some BLOCKS, or all the blocks defined by the last call to
   df_set_blocks if BLOCKS is NULL.  */

void
df_rescan_blocks (struct df *df, bitmap blocks)
{
  bitmap local_blocks_to_scan = BITMAP_ALLOC (NULL);

  struct dataflow *dflow = df->problems_by_index[DF_SCAN];
  basic_block bb;

  df->def_info.refs_organized = false;
  df->use_info.refs_organized = false;

  if (blocks)
    {
      int i;
      unsigned int bb_index;
      bitmap_iterator bi;
      bool cleared_bits = false;

      /* Need to assure that there is space in all of the tables.  */
      unsigned int insn_num = get_max_uid () + 1;
      insn_num += insn_num / 4;

      df_grow_reg_info (dflow, &df->def_info);
      df_grow_ref_info (&df->def_info, insn_num);

      df_grow_reg_info (dflow, &df->use_info);
      df_grow_ref_info (&df->use_info, insn_num * 2);

      df_grow_insn_info (df);
      df_grow_bb_info (dflow);

      bitmap_copy (local_blocks_to_scan, blocks);

      EXECUTE_IF_SET_IN_BITMAP (blocks, 0, bb_index, bi)
        {
          basic_block bb = BASIC_BLOCK (bb_index);
          if (!bb)
            {
              bitmap_clear_bit (local_blocks_to_scan, bb_index);
              cleared_bits = true;
            }
        }

      if (cleared_bits)
        bitmap_copy (blocks, local_blocks_to_scan);

      df->def_info.add_refs_inline = true;
      df->use_info.add_refs_inline = true;

      for (i = df->num_problems_defined; i; i--)
        {
          bitmap blocks_to_reset = NULL;
          if (dflow->problem->reset_fun)
            {
              if (!blocks_to_reset)
                {
                  blocks_to_reset = BITMAP_ALLOC (NULL);
                  bitmap_copy (blocks_to_reset, local_blocks_to_scan);
                  if (df->blocks_to_scan)
                    bitmap_ior_into (blocks_to_reset, df->blocks_to_scan);
                }
              dflow->problem->reset_fun (dflow, blocks_to_reset);
            }
          if (blocks_to_reset)
            BITMAP_FREE (blocks_to_reset);
        }

      df_refs_delete (dflow, local_blocks_to_scan);

      /* This may be a mistake, but if an explicit set of blocks is passed
         in and the set of blocks to analyze has been explicitly set, add
         the extra blocks to blocks_to_analyze.  The alternative is to
         put an assert here.  We do not want this to just go by
         silently or else we may get storage leaks.  */
      if (df->blocks_to_analyze)
        bitmap_ior_into (df->blocks_to_analyze, blocks);
    }
  else
    {
      /* If we are going to do everything, just reallocate everything.
         Most stuff is allocated in pools so this is faster than
         walking it.  */
      if (df->blocks_to_analyze)
        bitmap_copy (local_blocks_to_scan, df->blocks_to_analyze);
      else
        FOR_ALL_BB (bb)
          {
            bitmap_set_bit (local_blocks_to_scan, bb->index);
          }
      df_scan_alloc (dflow, local_blocks_to_scan, NULL);

      df->def_info.add_refs_inline = false;
      df->use_info.add_refs_inline = false;
    }

  df_refs_record (dflow, local_blocks_to_scan);
#if 0
  bitmap_print (stderr, local_blocks_to_scan, "scanning: ", "\n");
#endif

  if (!df->blocks_to_scan)
    df->blocks_to_scan = BITMAP_ALLOC (NULL);

  bitmap_ior_into (df->blocks_to_scan, local_blocks_to_scan);
  BITMAP_FREE (local_blocks_to_scan);
}
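
/* Incremental rescan sketch (illustrative): after rewriting the insns in
   a single block BB, a pass could rescan just that block:

       bitmap b = BITMAP_ALLOC (NULL);
       bitmap_set_bit (b, bb->index);
       df_rescan_blocks (df, b);
       BITMAP_FREE (b);

   Passing NULL instead rescans all the blocks set by the last call to
   df_set_blocks.  */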


/* Create a new ref of type DF_REF_TYPE for register REG at address
   LOC within INSN of BB.  */

struct df_ref *
df_ref_create (struct df *df, rtx reg, rtx *loc, rtx insn,
               basic_block bb,
               enum df_ref_type ref_type,
               enum df_ref_flags ref_flags)
{
  struct dataflow *dflow = df->problems_by_index[DF_SCAN];
  struct df_scan_bb_info *bb_info;

  df_grow_reg_info (dflow, &df->use_info);
  df_grow_reg_info (dflow, &df->def_info);
  df_grow_bb_info (dflow);

  /* Make sure the bb_info for this block exists.  */
  bb_info = df_scan_get_bb_info (dflow, bb->index);
  if (!bb_info)
    {
      bb_info = (struct df_scan_bb_info *) pool_alloc (dflow->block_pool);
      df_scan_set_bb_info (dflow, bb->index, bb_info);
      bb_info->artificial_defs = NULL;
      bb_info->artificial_uses = NULL;
    }

  if (ref_type == DF_REF_REG_DEF)
    df->def_info.add_refs_inline = true;
  else
    df->use_info.add_refs_inline = true;

  return df_ref_create_structure (dflow, reg, loc, bb, insn, ref_type, ref_flags);
}
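
/* Illustrative call (not from the original source): to tell df about a
   use of REG that a pass has just introduced in INSN, one would write
   something like:

       df_ref_create (df, reg, loc, insn, BLOCK_FOR_INSN (insn),
                      DF_REF_REG_USE, 0);

   where LOC points at the location of REG inside the insn pattern.  */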



/*----------------------------------------------------------------------------
   UTILITIES TO CREATE AND DESTROY REFS AND CHAINS.
----------------------------------------------------------------------------*/


/* Get the artificial defs for a basic block.  */

struct df_ref *
df_get_artificial_defs (struct df *df, unsigned int bb_index)
{
  struct dataflow *dflow = df->problems_by_index[DF_SCAN];
  return df_scan_get_bb_info (dflow, bb_index)->artificial_defs;
}


/* Get the artificial uses for a basic block.  */

struct df_ref *
df_get_artificial_uses (struct df *df, unsigned int bb_index)
{
  struct dataflow *dflow = df->problems_by_index[DF_SCAN];
  return df_scan_get_bb_info (dflow, bb_index)->artificial_uses;
}


/* Link REF at the front of reg_use or reg_def chain for REGNO.  */

void
df_reg_chain_create (struct df_reg_info *reg_info,
                     struct df_ref *ref)
{
  struct df_ref *head = reg_info->reg_chain;
  reg_info->reg_chain = ref;

  DF_REF_NEXT_REG (ref) = head;

  /* We cannot actually link to the head of the chain.  */
  DF_REF_PREV_REG (ref) = NULL;

  if (head)
    DF_REF_PREV_REG (head) = ref;
}
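
/* For illustration: if the chain for REGNO is currently r1 -> r2, then
   after df_reg_chain_create (reg_info, r0) it reads r0 -> r1 -> r2, with
   r0's prev pointer NULL and r1's prev pointer now r0.  */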


/* Remove REF from the CHAIN.  Return the head of the chain.  This
   will be CHAIN unless the REF was at the beginning of the chain.  */

static struct df_ref *
df_ref_unlink (struct df_ref *chain, struct df_ref *ref)
{
  struct df_ref *orig_chain = chain;
  struct df_ref *prev = NULL;
  while (chain)
    {
      if (chain == ref)
        {
          if (prev)
            {
              prev->next_ref = ref->next_ref;
              ref->next_ref = NULL;
              return orig_chain;
            }
          else
            {
              chain = ref->next_ref;
              ref->next_ref = NULL;
              return chain;
            }
        }

      prev = chain;
      chain = chain->next_ref;
    }

  /* Someone passed in a ref that was not in the chain.  */
  gcc_unreachable ();
  return NULL;
}


/* Unlink and delete REF from the reg_use or reg_def chain.  Also delete
   the def-use or use-def chain if it exists.  Returns the next ref in
   the uses or defs chain.  */

struct df_ref *
df_reg_chain_unlink (struct dataflow *dflow, struct df_ref *ref)
{
  struct df *df = dflow->df;
  struct df_ref *next = DF_REF_NEXT_REG (ref);
  struct df_ref *prev = DF_REF_PREV_REG (ref);
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) dflow->problem_data;
  struct df_reg_info *reg_info;
  struct df_ref *next_ref = ref->next_ref;
  unsigned int id = DF_REF_ID (ref);

  if (DF_REF_TYPE (ref) == DF_REF_REG_DEF)
    {
      reg_info = DF_REG_DEF_GET (df, DF_REF_REGNO (ref));
      df->def_info.bitmap_size--;
      if (df->def_info.refs && (id < df->def_info.refs_size))
        DF_DEFS_SET (df, id, NULL);
    }
  else
    {
      reg_info = DF_REG_USE_GET (df, DF_REF_REGNO (ref));
      df->use_info.bitmap_size--;
      if (df->use_info.refs && (id < df->use_info.refs_size))
        DF_USES_SET (df, id, NULL);
    }

  /* Delete any def-use or use-def chains that start here.  */
  if (DF_REF_CHAIN (ref))
    df_chain_unlink (df->problems_by_index[DF_CHAIN], ref, NULL);

  reg_info->n_refs--;

  /* Unlink from the reg chain.  If there is no prev, this is the
     first of the list.  If not, just join the next and prev.  */
  if (prev)
    {
      DF_REF_NEXT_REG (prev) = next;
      if (next)
        DF_REF_PREV_REG (next) = prev;
    }
  else
    {
      reg_info->reg_chain = next;
      if (next)
        DF_REF_PREV_REG (next) = NULL;
    }

  pool_free (problem_data->ref_pool, ref);
  return next_ref;
}


/* Unlink REF from all def-use/use-def chains, etc.  */

void
df_ref_remove (struct df *df, struct df_ref *ref)
{
  struct dataflow *dflow = df->problems_by_index[DF_SCAN];
  if (DF_REF_REG_DEF_P (ref))
    {
      if (DF_REF_FLAGS (ref) & DF_REF_ARTIFICIAL)
        {
          struct df_scan_bb_info *bb_info
            = df_scan_get_bb_info (dflow, DF_REF_BB (ref)->index);
          bb_info->artificial_defs
            = df_ref_unlink (bb_info->artificial_defs, ref);
        }
      else
        DF_INSN_UID_DEFS (df, DF_REF_INSN_UID (ref))
          = df_ref_unlink (DF_INSN_UID_DEFS (df, DF_REF_INSN_UID (ref)), ref);

      if (df->def_info.add_refs_inline)
        DF_DEFS_SET (df, DF_REF_ID (ref), NULL);
    }
  else
    {
      if (DF_REF_FLAGS (ref) & DF_REF_ARTIFICIAL)
        {
          struct df_scan_bb_info *bb_info
            = df_scan_get_bb_info (dflow, DF_REF_BB (ref)->index);
          bb_info->artificial_uses
            = df_ref_unlink (bb_info->artificial_uses, ref);
        }
      else
        DF_INSN_UID_USES (df, DF_REF_INSN_UID (ref))
          = df_ref_unlink (DF_INSN_UID_USES (df, DF_REF_INSN_UID (ref)), ref);

      if (df->use_info.add_refs_inline)
        DF_USES_SET (df, DF_REF_ID (ref), NULL);
    }

  df_reg_chain_unlink (dflow, ref);
}


/* Create the insn record for INSN.  If there was one there, zero it out.  */

static struct df_insn_info *
df_insn_create_insn_record (struct dataflow *dflow, rtx insn)
{
  struct df *df = dflow->df;
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) dflow->problem_data;

  struct df_insn_info *insn_rec = DF_INSN_GET (df, insn);
  if (!insn_rec)
    {
      insn_rec = pool_alloc (problem_data->insn_pool);
      DF_INSN_SET (df, insn, insn_rec);
    }
  memset (insn_rec, 0, sizeof (struct df_insn_info));

  return insn_rec;
}


/* Delete all of the refs information from INSN.  */

void
df_insn_refs_delete (struct dataflow *dflow, rtx insn)
{
  struct df *df = dflow->df;
  unsigned int uid = INSN_UID (insn);
  struct df_insn_info *insn_info = NULL;
  struct df_ref *ref;
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) dflow->problem_data;

  if (uid < df->insns_size)
    insn_info = DF_INSN_UID_GET (df, uid);

  if (insn_info)
    {
      struct df_mw_hardreg *hardregs = insn_info->mw_hardregs;

      while (hardregs)
        {
          struct df_mw_hardreg *next_hr = hardregs->next;
          struct df_link *link = hardregs->regs;
          while (link)
            {
              struct df_link *next_l = link->next;
              pool_free (problem_data->mw_link_pool, link);
              link = next_l;
            }

          pool_free (problem_data->mw_reg_pool, hardregs);
          hardregs = next_hr;
        }

      ref = insn_info->defs;
      while (ref)
        ref = df_reg_chain_unlink (dflow, ref);

      ref = insn_info->uses;
      while (ref)
        ref = df_reg_chain_unlink (dflow, ref);

      pool_free (problem_data->insn_pool, insn_info);
      DF_INSN_SET (df, insn, NULL);
    }
}


/* Delete all of the refs information from basic_block with BB_INDEX.  */

void
df_bb_refs_delete (struct dataflow *dflow, int bb_index)
{
  struct df_ref *def;
  struct df_ref *use;

  struct df_scan_bb_info *bb_info
    = df_scan_get_bb_info (dflow, bb_index);
  rtx insn;
  basic_block bb = BASIC_BLOCK (bb_index);
  FOR_BB_INSNS (bb, insn)
    {
      if (INSN_P (insn))
        {
          /* Delete the refs recorded for INSN.  */
          df_insn_refs_delete (dflow, insn);
        }
    }

  /* Get rid of any artificial uses or defs.  */
  if (bb_info)
    {
      def = bb_info->artificial_defs;
      while (def)
        def = df_reg_chain_unlink (dflow, def);
      bb_info->artificial_defs = NULL;
      use = bb_info->artificial_uses;
      while (use)
        use = df_reg_chain_unlink (dflow, use);
      bb_info->artificial_uses = NULL;
    }
}


/* Delete all of the refs information from BLOCKS.  */

void
df_refs_delete (struct dataflow *dflow, bitmap blocks)
{
  bitmap_iterator bi;
  unsigned int bb_index;

  EXECUTE_IF_SET_IN_BITMAP (blocks, 0, bb_index, bi)
    {
      df_bb_refs_delete (dflow, bb_index);
    }
}


/* Build the ref table for either the uses or the defs from the reg-use
   or reg-def chains.  */

void
df_reorganize_refs (struct df_ref_info *ref_info)
{
  unsigned int m = ref_info->regs_inited;
  unsigned int regno;
  unsigned int offset = 0;
  unsigned int size = 0;

  if (ref_info->refs_organized)
    return;

  if (ref_info->refs_size < ref_info->bitmap_size)
    {
      int new_size = ref_info->bitmap_size + ref_info->bitmap_size / 4;
      df_grow_ref_info (ref_info, new_size);
    }

  for (regno = 0; regno < m; regno++)
    {
      struct df_reg_info *reg_info = ref_info->regs[regno];
      int count = 0;
      if (reg_info)
        {
          struct df_ref *ref = reg_info->reg_chain;
          reg_info->begin = offset;
          while (ref)
            {
              ref_info->refs[offset] = ref;
              DF_REF_ID (ref) = offset++;
              ref = DF_REF_NEXT_REG (ref);
              count++;
              size++;
            }
          reg_info->n_refs = count;
        }
    }

  /* The bitmap size is not decremented when refs are deleted.  So
     reset it now that we have squished out all of the empty
     slots.  */
  ref_info->bitmap_size = size;
  ref_info->refs_organized = true;
  ref_info->add_refs_inline = true;
}
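
/* Layout example: after reorganization, refs[] is grouped by register,
   e.g. the refs for regno R occupy slots [begin, begin + n_refs) of
   ref_info->refs, and DF_REF_ID of each ref equals its slot index.  */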


/*----------------------------------------------------------------------------
   Hard core instruction scanning code.  No external interfaces here,
   just a lot of routines that look inside insns.
----------------------------------------------------------------------------*/

/* Create a ref and add it to the reg-def or reg-use chains.  */

static struct df_ref *
df_ref_create_structure (struct dataflow *dflow, rtx reg, rtx *loc,
                         basic_block bb, rtx insn,
                         enum df_ref_type ref_type,
                         enum df_ref_flags ref_flags)
{
  struct df_ref *this_ref;
  struct df *df = dflow->df;
  int regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) dflow->problem_data;

  this_ref = pool_alloc (problem_data->ref_pool);
  DF_REF_REG (this_ref) = reg;
  DF_REF_REGNO (this_ref) = regno;
  DF_REF_LOC (this_ref) = loc;
  DF_REF_INSN (this_ref) = insn;
  DF_REF_CHAIN (this_ref) = NULL;
  DF_REF_TYPE (this_ref) = ref_type;
  DF_REF_FLAGS (this_ref) = ref_flags;
  DF_REF_DATA (this_ref) = NULL;
  DF_REF_BB (this_ref) = bb;

  /* Link the ref into the reg_def and reg_use chains and keep a count
     of the instances.  */
  switch (ref_type)
    {
    case DF_REF_REG_DEF:
      {
        struct df_reg_info *reg_info = DF_REG_DEF_GET (df, regno);
        reg_info->n_refs++;

        /* Add the ref to the reg_def chain.  */
        df_reg_chain_create (reg_info, this_ref);
        DF_REF_ID (this_ref) = df->def_info.bitmap_size;
        if (df->def_info.add_refs_inline)
          {
            if (DF_DEFS_SIZE (df) >= df->def_info.refs_size)
              {
                int new_size = df->def_info.bitmap_size
                  + df->def_info.bitmap_size / 4;
                df_grow_ref_info (&df->def_info, new_size);
              }
            /* Add the ref to the big array of defs.  */
            DF_DEFS_SET (df, df->def_info.bitmap_size, this_ref);
            df->def_info.refs_organized = false;
          }

        df->def_info.bitmap_size++;

        if (DF_REF_FLAGS (this_ref) & DF_REF_ARTIFICIAL)
          {
            struct df_scan_bb_info *bb_info
              = df_scan_get_bb_info (dflow, bb->index);
            this_ref->next_ref = bb_info->artificial_defs;
            bb_info->artificial_defs = this_ref;
          }
        else
          {
            this_ref->next_ref = DF_INSN_GET (df, insn)->defs;
            DF_INSN_GET (df, insn)->defs = this_ref;
          }
      }
      break;

    case DF_REF_REG_MEM_LOAD:
    case DF_REF_REG_MEM_STORE:
    case DF_REF_REG_USE:
      {
        struct df_reg_info *reg_info = DF_REG_USE_GET (df, regno);
        reg_info->n_refs++;

        /* Add the ref to the reg_use chain.  */
        df_reg_chain_create (reg_info, this_ref);
        DF_REF_ID (this_ref) = df->use_info.bitmap_size;
        if (df->use_info.add_refs_inline)
          {
            if (DF_USES_SIZE (df) >= df->use_info.refs_size)
              {
                int new_size = df->use_info.bitmap_size
                  + df->use_info.bitmap_size / 4;
                df_grow_ref_info (&df->use_info, new_size);
              }
            /* Add the ref to the big array of uses.  */
            DF_USES_SET (df, df->use_info.bitmap_size, this_ref);
            df->use_info.refs_organized = false;
          }

        df->use_info.bitmap_size++;
        if (DF_REF_FLAGS (this_ref) & DF_REF_ARTIFICIAL)
          {
            struct df_scan_bb_info *bb_info
              = df_scan_get_bb_info (dflow, bb->index);
            this_ref->next_ref = bb_info->artificial_uses;
            bb_info->artificial_uses = this_ref;
          }
        else
          {
            this_ref->next_ref = DF_INSN_GET (df, insn)->uses;
            DF_INSN_GET (df, insn)->uses = this_ref;
          }
      }
      break;

    default:
      gcc_unreachable ();
    }
  return this_ref;
}


/* Create new references of type DF_REF_TYPE for each part of register REG
   at address LOC within INSN of BB.  */

static void
df_ref_record (struct dataflow *dflow, rtx reg, rtx *loc,
               basic_block bb, rtx insn,
               enum df_ref_type ref_type,
               enum df_ref_flags ref_flags,
               bool record_live)
{
  struct df *df = dflow->df;
  rtx oldreg = reg;
  unsigned int regno;

  gcc_assert (REG_P (reg) || GET_CODE (reg) == SUBREG);

  /* For the reg allocator we are interested in some SUBREG rtx's, but not
     all.  Notably only those representing a word extraction from a multi-word
     reg.  As written in the documentation, those should have the form
     (subreg:SI (reg:M A) N), with size(SImode) > size(Mmode).
     XXX Is that true?  We could also use the global word_mode variable.  */
  if ((dflow->flags & DF_SUBREGS) == 0
      && GET_CODE (reg) == SUBREG
      && (GET_MODE_SIZE (GET_MODE (reg)) < GET_MODE_SIZE (word_mode)
          || GET_MODE_SIZE (GET_MODE (reg))
               >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (reg)))))
    {
      loc = &SUBREG_REG (reg);
      reg = *loc;
      ref_flags |= DF_REF_STRIPPED;
    }

  regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
  if (regno < FIRST_PSEUDO_REGISTER)
    {
      unsigned int i;
      unsigned int endregno;
      struct df_mw_hardreg *hardreg = NULL;
      struct df_scan_problem_data *problem_data
        = (struct df_scan_problem_data *) dflow->problem_data;

      if (!(dflow->flags & DF_HARD_REGS))
        return;

      /* GET_MODE (reg) is correct here.  We do not want to go into a SUBREG
         for the mode, because we only want to add references to regs, which
         are really referenced.  E.g., a (subreg:SI (reg:DI 0) 0) does _not_
         reference the whole reg 0 in DI mode (which would also include
         reg 1, at least, if 0 and 1 are SImode registers).  */
      endregno = hard_regno_nregs[regno][GET_MODE (reg)];
      if (GET_CODE (reg) == SUBREG)
        regno += subreg_regno_offset (regno, GET_MODE (SUBREG_REG (reg)),
                                      SUBREG_BYTE (reg), GET_MODE (reg));
      endregno += regno;

      /* If this is a multiword hardreg, we create some extra data structures
         that will enable us to easily build REG_DEAD and REG_UNUSED notes.  */
      if ((endregno != regno + 1) && insn)
        {
          struct df_insn_info *insn_info = DF_INSN_GET (df, insn);
          /* Sets to a subreg of a multiword register are partial.
             Sets to a non-subreg of a multiword register are not.  */
          if (GET_CODE (oldreg) == SUBREG)
            ref_flags |= DF_REF_PARTIAL;
          ref_flags |= DF_REF_MW_HARDREG;
          hardreg = pool_alloc (problem_data->mw_reg_pool);
          hardreg->next = insn_info->mw_hardregs;
          insn_info->mw_hardregs = hardreg;
          hardreg->type = ref_type;
          hardreg->flags = ref_flags;
          hardreg->mw_reg = reg;
          hardreg->regs = NULL;
        }

      for (i = regno; i < endregno; i++)
        {
          struct df_ref *ref;

          /* Calls are handled at call site because regs_ever_live
             doesn't include clobbered regs, only used ones.  */
          if (ref_type == DF_REF_REG_DEF && record_live)
            regs_ever_live[i] = 1;
          else if ((ref_type == DF_REF_REG_USE
                   || ref_type == DF_REF_REG_MEM_STORE
                   || ref_type == DF_REF_REG_MEM_LOAD)
                   && ((ref_flags & DF_REF_ARTIFICIAL) == 0))
            {
              /* Set regs_ever_live on uses of non-eliminable frame
                 pointers and arg pointers.  */
              if (!(TEST_HARD_REG_BIT (elim_reg_set, regno)
                     && (regno == FRAME_POINTER_REGNUM
                         || regno == ARG_POINTER_REGNUM)))
                regs_ever_live[i] = 1;
            }

          ref = df_ref_create_structure (dflow, regno_reg_rtx[i], loc,
                                         bb, insn, ref_type, ref_flags);
          if (hardreg)
            {
              struct df_link *link = pool_alloc (problem_data->mw_link_pool);

              link->next = hardreg->regs;
              link->ref = ref;
              hardreg->regs = link;
            }
        }
    }
  else
    {
      df_ref_create_structure (dflow, reg, loc,
                               bb, insn, ref_type, ref_flags);
    }
}
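
/* Worked example of the hard-reg loop above (assuming 32-bit words, so
   DImode spans two hard regs): a def of (reg:DI 2) gives regno == 2 and
   endregno == 4, so refs are created for hard regs 2 and 3, and both are
   linked onto the same df_mw_hardreg record via mw_link_pool.  */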


/* A set to a non-paradoxical SUBREG for which the number of word_mode units
   covered by the outer mode is smaller than that covered by the inner mode,
   is a read-modify-write operation.
   This function returns true iff the SUBREG X is such a SUBREG.  */

bool
df_read_modify_subreg_p (rtx x)
{
  unsigned int isize, osize;
  if (GET_CODE (x) != SUBREG)
    return false;
  isize = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)));
  osize = GET_MODE_SIZE (GET_MODE (x));
  return (isize > osize && isize > UNITS_PER_WORD);
}
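
/* Example (assuming UNITS_PER_WORD == 4): (subreg:SI (reg:DI 0) 0) has
   isize == 8 and osize == 4, so the result is true: storing the SImode
   half leaves the other word of the DImode register intact, a
   read-modify-write.  (subreg:QI (reg:SI 0) 0) yields false, since
   isize == 4 is not greater than UNITS_PER_WORD.  */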


/* Process all the registers defined in the rtx, X.
   Autoincrement/decrement definitions will be picked up by
   df_uses_record.  */

static void
df_def_record_1 (struct dataflow *dflow, rtx x,
                 basic_block bb, rtx insn,
                 enum df_ref_flags flags, bool record_live)
{
  rtx *loc;
  rtx dst;
  bool dst_in_strict_lowpart = false;

  /* We may recursively call ourselves on EXPR_LIST when dealing with a
     PARALLEL construct.  */
  if (GET_CODE (x) == EXPR_LIST || GET_CODE (x) == CLOBBER)
    loc = &XEXP (x, 0);
  else
    loc = &SET_DEST (x);
  dst = *loc;

  /* It is legal to have a set destination be a parallel.  */
  if (GET_CODE (dst) == PARALLEL)
    {
      int i;

      for (i = XVECLEN (dst, 0) - 1; i >= 0; i--)
        {
          rtx temp = XVECEXP (dst, 0, i);
          if (GET_CODE (temp) == EXPR_LIST || GET_CODE (temp) == CLOBBER
              || GET_CODE (temp) == SET)
            df_def_record_1 (dflow, temp, bb, insn,
                             GET_CODE (temp) == CLOBBER
                             ? flags | DF_REF_MUST_CLOBBER : flags,
                             record_live);
        }
      return;
    }

  /* Maybe, we should flag the use of STRICT_LOW_PART somehow.  It might
     be handy for the reg allocator.  */
  while (GET_CODE (dst) == STRICT_LOW_PART
         || GET_CODE (dst) == ZERO_EXTRACT
         || df_read_modify_subreg_p (dst))
    {
#if 0
      /* Strict low part always contains SUBREG, but we do not want to make
         it appear outside, as whole register is always considered.  */
      if (GET_CODE (dst) == STRICT_LOW_PART)
        {
          loc = &XEXP (dst, 0);
          dst = *loc;
        }
#endif
      loc = &XEXP (dst, 0);
      if (GET_CODE (dst) == STRICT_LOW_PART)
        dst_in_strict_lowpart = true;
      dst = *loc;
      flags |= DF_REF_READ_WRITE;
    }

  /* Sets to a subreg of a single word register are partial sets if
     they are wrapped in a strict lowpart, and not partial otherwise.  */
  if (GET_CODE (dst) == SUBREG && REG_P (SUBREG_REG (dst))
      && dst_in_strict_lowpart)
    flags |= DF_REF_PARTIAL;

  if (REG_P (dst)
      || (GET_CODE (dst) == SUBREG && REG_P (SUBREG_REG (dst))))
    df_ref_record (dflow, dst, loc, bb, insn,
                   DF_REF_REG_DEF, flags, record_live);
}


/* Process all the registers defined in the pattern rtx, X.  */

static void
df_defs_record (struct dataflow *dflow, rtx x, basic_block bb, rtx insn)
{
  RTX_CODE code = GET_CODE (x);

  if (code == SET || code == CLOBBER)
    {
      /* Mark the single def within the pattern.  */
      df_def_record_1 (dflow, x, bb, insn,
                       code == CLOBBER ? DF_REF_MUST_CLOBBER : 0, true);
    }
  else if (code == COND_EXEC)
    {
      df_defs_record (dflow, COND_EXEC_CODE (x), bb, insn);
    }
  else if (code == PARALLEL)
    {
      int i;

      /* Mark the multiple defs within the pattern.  */
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
        df_defs_record (dflow, XVECEXP (x, 0, i), bb, insn);
    }
}


/* Process all the registers used in the rtx at address LOC.  */

static void
df_uses_record (struct dataflow *dflow, rtx *loc, enum df_ref_type ref_type,
                basic_block bb, rtx insn, enum df_ref_flags flags)
{
  RTX_CODE code;
  rtx x;
 retry:
  x = *loc;
  if (!x)
    return;
  code = GET_CODE (x);
  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case PC:
    case CC0:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return;

    case CLOBBER:
      /* If we are clobbering a MEM, mark any registers inside the address
         as being used.  */
      if (MEM_P (XEXP (x, 0)))
        df_uses_record (dflow, &XEXP (XEXP (x, 0), 0),
                        DF_REF_REG_MEM_STORE, bb, insn, flags);

      /* If we're clobbering a REG then we have a def so ignore.  */
      return;

    case MEM:
      df_uses_record (dflow, &XEXP (x, 0), DF_REF_REG_MEM_LOAD, bb, insn,
                      flags & DF_REF_IN_NOTE);
      return;

    case SUBREG:
      /* While we're here, optimize this case.  */
      flags |= DF_REF_PARTIAL;
      /* In case the SUBREG is not of a REG, do not optimize.  */
      if (!REG_P (SUBREG_REG (x)))
        {
          loc = &SUBREG_REG (x);
          df_uses_record (dflow, loc, ref_type, bb, insn, flags);
          return;
        }
      /* ... Fall through ...  */

    case REG:
      df_ref_record (dflow, x, loc, bb, insn, ref_type, flags, true);
      return;

    case SET:
      {
        rtx dst = SET_DEST (x);
        gcc_assert (!(flags & DF_REF_IN_NOTE));
        df_uses_record (dflow, &SET_SRC (x), DF_REF_REG_USE, bb, insn, flags);

        switch (GET_CODE (dst))
          {
            case SUBREG:
              if (df_read_modify_subreg_p (dst))
                {
                  df_uses_record (dflow, &SUBREG_REG (dst),
                                  DF_REF_REG_USE, bb,
                                  insn, flags | DF_REF_READ_WRITE);
                  break;
                }
              /* Fall through.  */
            case REG:
            case PARALLEL:
            case SCRATCH:
            case PC:
            case CC0:
              break;
            case MEM:
              df_uses_record (dflow, &XEXP (dst, 0),
                              DF_REF_REG_MEM_STORE,
                              bb, insn, flags);
              break;
            case STRICT_LOW_PART:
              {
                rtx *temp = &XEXP (dst, 0);
                /* A strict_low_part uses the whole REG and not just the
                   SUBREG.  */
                dst = XEXP (dst, 0);
                df_uses_record (dflow,
                                (GET_CODE (dst) == SUBREG)
                                ? &SUBREG_REG (dst) : temp,
                                DF_REF_REG_USE, bb,
                                insn, DF_REF_READ_WRITE);
              }
              break;
            case ZERO_EXTRACT:
            case SIGN_EXTRACT:
              df_uses_record (dflow, &XEXP (dst, 0),
                              DF_REF_REG_USE, bb, insn,
                              DF_REF_READ_WRITE);
              df_uses_record (dflow, &XEXP (dst, 1),
                              DF_REF_REG_USE, bb, insn, flags);
              df_uses_record (dflow, &XEXP (dst, 2),
                              DF_REF_REG_USE, bb, insn, flags);
              dst = XEXP (dst, 0);
              break;
            default:
              gcc_unreachable ();
          }
        return;
      }

    case RETURN:
      break;

    case ASM_OPERANDS:
    case UNSPEC_VOLATILE:
    case TRAP_IF:
    case ASM_INPUT:
      {
        /* Traditional and volatile asm instructions must be
           considered to use and clobber all hard registers, all
           pseudo-registers and all of memory.  So must TRAP_IF and
           UNSPEC_VOLATILE operations.

           Consider for instance a volatile asm that changes the fpu
           rounding mode.  An insn should not be moved across this
           even if it only uses pseudo-regs because it might give an
           incorrectly rounded result.

           However, flow.c's liveness computation did *not* do this,
           giving the reasoning as " ?!? Unfortunately, marking all
           hard registers as live causes massive problems for the
           register allocator and marking all pseudos as live creates
           mountains of uninitialized variable warnings."

           In order to maintain the status quo with regard to liveness
           and uses, we do what flow.c did and just mark any regs we
           can find in ASM_OPERANDS as used.  Later on, when liveness
           is computed, asm insns are scanned and regs_asm_clobbered
           is filled out.

           For all ASM_OPERANDS, we must traverse the vector of input
           operands.  We can not just fall through here since then we
           would be confused by the ASM_INPUT rtx inside ASM_OPERANDS,
           which do not indicate traditional asms unlike their normal
           usage.  */
        if (code == ASM_OPERANDS)
          {
            int j;

            for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
              df_uses_record (dflow, &ASM_OPERANDS_INPUT (x, j),
                              DF_REF_REG_USE, bb, insn, flags);
            return;
          }
        break;
      }

    case PRE_DEC:
    case POST_DEC:
    case PRE_INC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      /* Catch the def of the register being modified.  */
      flags |= DF_REF_READ_WRITE;
      df_ref_record (dflow, XEXP (x, 0), &XEXP (x, 0), bb, insn,
                     DF_REF_REG_DEF, flags, true);

      /* ... Fall through to handle uses ...  */

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */
  {
    const char *fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          {
            /* Tail recursive case: save a function call level.  */
            if (i == 0)
              {
                loc = &XEXP (x, 0);
                goto retry;
              }
            df_uses_record (dflow, &XEXP (x, i), ref_type, bb, insn, flags);
          }
        else if (fmt[i] == 'E')
          {
            int j;
            for (j = 0; j < XVECLEN (x, i); j++)
              df_uses_record (dflow, &XVECEXP (x, i, j), ref_type,
                              bb, insn, flags);
          }
      }
  }
}
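
/* Walk example: for (set (reg:SI 5) (mem:SI (plus:SI (reg:SI 1)
   (const_int 4)))), the SET case scans the source; the MEM case rescans
   the address with type DF_REF_REG_MEM_LOAD, so reg 1 is recorded as a
   DF_REF_REG_MEM_LOAD use.  The def of reg 5 is handled separately by
   df_defs_record.  */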

/* Return true if *LOC contains an asm.  */

static int
df_insn_contains_asm_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
{
  if (!*loc)
    return 0;
  if (GET_CODE (*loc) == ASM_OPERANDS)
    return 1;
  return 0;
}


/* Return true if INSN contains an ASM.  */

static int
df_insn_contains_asm (rtx insn)
{
  return for_each_rtx (&insn, df_insn_contains_asm_1, NULL);
}



/* Record all the refs for DF within INSN of basic block BB.  */

static void
df_insn_refs_record (struct dataflow *dflow, basic_block bb, rtx insn)
{
  struct df *df = dflow->df;
  int i;

  if (INSN_P (insn))
    {
      rtx note;

      if (df_insn_contains_asm (insn))
        DF_INSN_CONTAINS_ASM (df, insn) = true;

      /* Record register defs.  */
      df_defs_record (dflow, PATTERN (insn), bb, insn);

      if (dflow->flags & DF_EQUIV_NOTES)
        for (note = REG_NOTES (insn); note;
             note = XEXP (note, 1))
          {
            switch (REG_NOTE_KIND (note))
              {
              case REG_EQUIV:
              case REG_EQUAL:
                df_uses_record (dflow, &XEXP (note, 0), DF_REF_REG_USE,
                                bb, insn, DF_REF_IN_NOTE);
              default:
                break;
              }
          }

      if (CALL_P (insn))
        {
          rtx note;

          /* Record the registers used to pass arguments, and those
             explicitly noted as clobbered.  */
          for (note = CALL_INSN_FUNCTION_USAGE (insn); note;
               note = XEXP (note, 1))
            {
              if (GET_CODE (XEXP (note, 0)) == USE)
                df_uses_record (dflow, &XEXP (XEXP (note, 0), 0),
                                DF_REF_REG_USE,
                                bb, insn, 0);
              else if (GET_CODE (XEXP (note, 0)) == CLOBBER)
                {
                  df_defs_record (dflow, XEXP (note, 0), bb, insn);
                  if (REG_P (XEXP (XEXP (note, 0), 0)))
                    {
                      rtx reg = XEXP (XEXP (note, 0), 0);
                      int regno_last;
                      int regno_first;
                      int i;

                      regno_last = regno_first = REGNO (reg);
                      if (regno_first < FIRST_PSEUDO_REGISTER)
                        regno_last
                          += hard_regno_nregs[regno_first][GET_MODE (reg)] - 1;
                      for (i = regno_first; i <= regno_last; i++)
                        regs_ever_live[i] = 1;
                    }
                }
            }

          /* The stack ptr is used (honorarily) by a CALL insn.  */
          df_uses_record (dflow, &regno_reg_rtx[STACK_POINTER_REGNUM],
                          DF_REF_REG_USE, bb, insn,
                          0);

          if (dflow->flags & DF_HARD_REGS)
            {
              bitmap_iterator bi;
              unsigned int ui;
              /* Calls may also reference any of the global registers,
                 so they are recorded as used.  */
              for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
                if (global_regs[i])
                  df_uses_record (dflow, &regno_reg_rtx[i],
                                  DF_REF_REG_USE, bb, insn,
                                  0);
              EXECUTE_IF_SET_IN_BITMAP (df_invalidated_by_call, 0, ui, bi)
                df_ref_record (dflow, regno_reg_rtx[ui], &regno_reg_rtx[ui], bb,
                               insn, DF_REF_REG_DEF, DF_REF_MAY_CLOBBER, false);
            }
        }

      /* Record the register uses.  */
      df_uses_record (dflow, &PATTERN (insn),
                      DF_REF_REG_USE, bb, insn, 0);
    }
}
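
/* Return true if basic block BB has at least one incoming EH edge.  */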
static bool
df_has_eh_preds (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      if (e->flags & EDGE_EH)
        return true;
    }
  return false;
}
1616
 
1617
/* Record all the refs within the basic block BB.  */

static void
df_bb_refs_record (struct dataflow *dflow, basic_block bb)
{
  struct df *df = dflow->df;
  rtx insn;
  int luid = 0;
  struct df_scan_bb_info *bb_info = df_scan_get_bb_info (dflow, bb->index);
  bitmap artificial_uses_at_bottom = NULL;

  if (dflow->flags & DF_HARD_REGS)
    artificial_uses_at_bottom = BITMAP_ALLOC (NULL);

  /* Need to make sure that there is a record in the basic block info.  */
  if (!bb_info)
    {
      bb_info = (struct df_scan_bb_info *) pool_alloc (dflow->block_pool);
      df_scan_set_bb_info (dflow, bb->index, bb_info);
      bb_info->artificial_defs = NULL;
      bb_info->artificial_uses = NULL;
    }

  /* Scan the block an insn at a time from beginning to end.  */
  FOR_BB_INSNS (bb, insn)
    {
      df_insn_create_insn_record (dflow, insn);
      if (INSN_P (insn))
        {
          /* Record defs within INSN.  */
          DF_INSN_LUID (df, insn) = luid++;
          df_insn_refs_record (dflow, bb, insn);
        }
      DF_INSN_LUID (df, insn) = luid;
    }

#ifdef EH_RETURN_DATA_REGNO
  if ((dflow->flags & DF_HARD_REGS)
      && df_has_eh_preds (bb))
    {
      unsigned int i;
      /* Mark the registers that will contain data for the handler.  */
      for (i = 0; ; ++i)
        {
          unsigned regno = EH_RETURN_DATA_REGNO (i);
          if (regno == INVALID_REGNUM)
            break;
          df_ref_record (dflow, regno_reg_rtx[regno], &regno_reg_rtx[regno],
                         bb, NULL,
                         DF_REF_REG_DEF, DF_REF_ARTIFICIAL | DF_REF_AT_TOP,
                         false);
        }
    }
#endif


  if ((dflow->flags & DF_HARD_REGS)
      && df_has_eh_preds (bb))
    {
#ifdef EH_USES
      unsigned int i;
      /* This code is putting in an artificial ref for the use at the
         TOP of the block that receives the exception.  It is too
         cumbersome to actually put the ref on the edge.  We could
         either model this at the top of the receiver block or the
         bottom of the sender block.

         The bottom of the sender block is problematic because not all
         out-edges of a block are eh-edges.  However, it is true
         that all edges into a block are either eh-edges or none of
         them are eh-edges.  Thus, we can model this at the top of the
         eh-receiver for all of the edges at once.  */
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
        if (EH_USES (i))
          df_uses_record (dflow, &regno_reg_rtx[i],
                          DF_REF_REG_USE, bb, NULL,
                          DF_REF_ARTIFICIAL | DF_REF_AT_TOP);
#endif

      /* The following code (down through the arg_pointer setting)
         appears to be necessary because nothing actually describes
         what the exception handling code may need to keep alive.  */
      if (reload_completed)
        {
          if (frame_pointer_needed)
            {
              bitmap_set_bit (artificial_uses_at_bottom, FRAME_POINTER_REGNUM);
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
              bitmap_set_bit (artificial_uses_at_bottom, HARD_FRAME_POINTER_REGNUM);
#endif
            }
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
          if (fixed_regs[ARG_POINTER_REGNUM])
            bitmap_set_bit (artificial_uses_at_bottom, ARG_POINTER_REGNUM);
#endif
        }
    }

  if ((dflow->flags & DF_HARD_REGS)
      && bb->index >= NUM_FIXED_BLOCKS)
    {
      /* Before reload, there are a few registers that must be forced
         live everywhere -- which might not already be the case for
         blocks within infinite loops.  */
      if (!reload_completed)
        {
          /* Any reference to any pseudo before reload is a potential
             reference of the frame pointer.  */
          bitmap_set_bit (artificial_uses_at_bottom, FRAME_POINTER_REGNUM);

#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
          /* Pseudos with argument area equivalences may require
             reloading via the argument pointer.  */
          if (fixed_regs[ARG_POINTER_REGNUM])
            bitmap_set_bit (artificial_uses_at_bottom, ARG_POINTER_REGNUM);
#endif

          /* Any constant, or pseudo with constant equivalences, may
             require reloading from memory using the pic register.  */
          if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
              && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
            bitmap_set_bit (artificial_uses_at_bottom, PIC_OFFSET_TABLE_REGNUM);
        }
      /* The all-important stack pointer must always be live.  */
      bitmap_set_bit (artificial_uses_at_bottom, STACK_POINTER_REGNUM);
    }

  if (dflow->flags & DF_HARD_REGS)
    {
      bitmap_iterator bi;
      unsigned int regno;

      EXECUTE_IF_SET_IN_BITMAP (artificial_uses_at_bottom, 0, regno, bi)
        {
          df_uses_record (dflow, &regno_reg_rtx[regno],
                          DF_REF_REG_USE, bb, NULL, DF_REF_ARTIFICIAL);
        }

      BITMAP_FREE (artificial_uses_at_bottom);
    }
}

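/* Illustrative note (not part of the original source): after this scan,
   a block's refs are the per-insn defs and uses plus the artificial
   refs created above.  Refs flagged DF_REF_AT_TOP (e.g. the EH data
   register defs) behave as if they occur before the block's first
   insn, while plain artificial refs (the artificial_uses_at_bottom
   set) behave as if they occur after its last insn.  */
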
/* Record all the refs in the basic blocks specified by BLOCKS.  */

static void
df_refs_record (struct dataflow *dflow, bitmap blocks)
{
  unsigned int bb_index;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (blocks, 0, bb_index, bi)
    {
      basic_block bb = BASIC_BLOCK (bb_index);
      df_bb_refs_record (dflow, bb);
    }

  if (bitmap_bit_p (blocks, EXIT_BLOCK))
    df_record_exit_block_uses (dflow);

  if (bitmap_bit_p (blocks, ENTRY_BLOCK))
    df_record_entry_block_defs (dflow);
}

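/* Illustrative note (not part of the original source): ENTRY_BLOCK and
   EXIT_BLOCK contain no insns, so df_bb_refs_record finds nothing in
   them; their refs are instead the artificial defs and uses
   synthesized by df_record_entry_block_defs and
   df_record_exit_block_uses.  */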
 
/*----------------------------------------------------------------------------
   Specialized hard register scanning functions.
----------------------------------------------------------------------------*/

/* Mark a register in SET.  Hard registers in large modes get all
   of their component registers set as well.  */

static void
df_mark_reg (rtx reg, void *vset)
{
  bitmap set = (bitmap) vset;
  int regno = REGNO (reg);

  gcc_assert (GET_MODE (reg) != BLKmode);

  bitmap_set_bit (set, regno);
  if (regno < FIRST_PSEUDO_REGISTER)
    {
      int n = hard_regno_nregs[regno][GET_MODE (reg)];
      while (--n > 0)
        bitmap_set_bit (set, regno + n);
    }
}

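/* Illustrative example (not part of the original source): on a target
   whose word size is 32 bits, a DImode value in hard register 4
   occupies two registers, so df_mark_reg on (reg:DI 4) sets bits 4
   and 5 of SET because hard_regno_nregs[4][DImode] == 2.  The
   register number is hypothetical.  */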
 
/* Record the (conservative) set of hard registers that are defined on
   entry to the function.  */

static void
df_record_entry_block_defs (struct dataflow *dflow)
{
  unsigned int i;
  bitmap_iterator bi;
  rtx r;
  struct df *df = dflow->df;

  bitmap_clear (df->entry_block_defs);

  if (!(dflow->flags & DF_HARD_REGS))
    return;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (FUNCTION_ARG_REGNO_P (i))
#ifdef INCOMING_REGNO
        bitmap_set_bit (df->entry_block_defs, INCOMING_REGNO (i));
#else
        bitmap_set_bit (df->entry_block_defs, i);
#endif
    }

  /* Once the prologue has been generated, all of these registers
     should just show up in the first regular block.  */
  if (HAVE_prologue && epilogue_completed)
    {
      /* Defs for the callee saved registers are inserted so that the
         pushes have some defining location.  */
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
        if ((call_used_regs[i] == 0) && (regs_ever_live[i]))
          bitmap_set_bit (df->entry_block_defs, i);
    }
  else
    {
      /* The always-important stack pointer.  */
      bitmap_set_bit (df->entry_block_defs, STACK_POINTER_REGNUM);

#ifdef INCOMING_RETURN_ADDR_RTX
      if (REG_P (INCOMING_RETURN_ADDR_RTX))
        bitmap_set_bit (df->entry_block_defs, REGNO (INCOMING_RETURN_ADDR_RTX));
#endif

      /* If STATIC_CHAIN_INCOMING_REGNUM == STATIC_CHAIN_REGNUM
         only STATIC_CHAIN_REGNUM is defined.  If they are different,
         we only care about the STATIC_CHAIN_INCOMING_REGNUM.  */
#ifdef STATIC_CHAIN_INCOMING_REGNUM
      bitmap_set_bit (df->entry_block_defs, STATIC_CHAIN_INCOMING_REGNUM);
#else
#ifdef STATIC_CHAIN_REGNUM
      bitmap_set_bit (df->entry_block_defs, STATIC_CHAIN_REGNUM);
#endif
#endif

      r = TARGET_STRUCT_VALUE_RTX (current_function_decl, true);
      if (r && REG_P (r))
        bitmap_set_bit (df->entry_block_defs, REGNO (r));
    }

  if ((!reload_completed) || frame_pointer_needed)
    {
      /* Any reference to any pseudo before reload is a potential
         reference of the frame pointer.  */
      bitmap_set_bit (df->entry_block_defs, FRAME_POINTER_REGNUM);
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      /* If they are different, also mark the hard frame pointer as live.  */
      if (!LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
        bitmap_set_bit (df->entry_block_defs, HARD_FRAME_POINTER_REGNUM);
#endif
    }

  /* These registers are live everywhere.  */
  if (!reload_completed)
    {
#ifdef EH_USES
      /* The ia64, the only machine that uses this, does not define these
         until after reload.  */
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
        if (EH_USES (i))
          {
            bitmap_set_bit (df->entry_block_defs, i);
          }
#endif

#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      /* Pseudos with argument area equivalences may require
         reloading via the argument pointer.  */
      if (fixed_regs[ARG_POINTER_REGNUM])
        bitmap_set_bit (df->entry_block_defs, ARG_POINTER_REGNUM);
#endif

#ifdef PIC_OFFSET_TABLE_REGNUM
      /* Any constant, or pseudo with constant equivalences, may
         require reloading from memory using the pic register.  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
          && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
        bitmap_set_bit (df->entry_block_defs, PIC_OFFSET_TABLE_REGNUM);
#endif
    }

  targetm.live_on_entry (df->entry_block_defs);

  EXECUTE_IF_SET_IN_BITMAP (df->entry_block_defs, 0, i, bi)
    {
      df_ref_record (dflow, regno_reg_rtx[i], &regno_reg_rtx[i],
                     ENTRY_BLOCK_PTR, NULL,
                     DF_REF_REG_DEF, DF_REF_ARTIFICIAL, false);
    }
}

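/* Illustrative note (not part of the original source): INCOMING_REGNO
   matters on targets where a value arrives in a different register
   than the one the caller loaded it into (e.g. register windows on
   SPARC); the defs above are recorded against the registers the
   callee actually sees on entry.  */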
 
/* Record the set of hard registers that are used in the exit block.  */

static void
df_record_exit_block_uses (struct dataflow *dflow)
{
  unsigned int i;
  bitmap_iterator bi;
  struct df *df = dflow->df;

  bitmap_clear (df->exit_block_uses);

  if (!(dflow->flags & DF_HARD_REGS))
    return;

  /* If exiting needs the right stack value, consider the stack
     pointer live at the end of the function.  */
  if ((HAVE_epilogue && epilogue_completed)
      || !EXIT_IGNORE_STACK
      || (!FRAME_POINTER_REQUIRED
          && !current_function_calls_alloca
          && flag_omit_frame_pointer)
      || current_function_sp_is_unchanging)
    {
      bitmap_set_bit (df->exit_block_uses, STACK_POINTER_REGNUM);
    }

  /* Mark the frame pointer if needed at the end of the function.
     If we end up eliminating it, it will be removed from the live
     list of each basic block by reload.  */

  if ((!reload_completed) || frame_pointer_needed)
    {
      bitmap_set_bit (df->exit_block_uses, FRAME_POINTER_REGNUM);
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      /* If they are different, also mark the hard frame pointer as live.  */
      if (!LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
        bitmap_set_bit (df->exit_block_uses, HARD_FRAME_POINTER_REGNUM);
#endif
    }

#ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
  /* Many architectures have a GP register even without flag_pic.
     Assume the pic register is not in use, or will be handled by
     other means, if it is not fixed.  */
  if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
      && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
    bitmap_set_bit (df->exit_block_uses, PIC_OFFSET_TABLE_REGNUM);
#endif

  /* Mark all global registers, and all registers used by the
     epilogue, as being live at the end of the function since they
     may be referenced by our caller.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (global_regs[i] || EPILOGUE_USES (i))
      bitmap_set_bit (df->exit_block_uses, i);

  if (HAVE_epilogue && epilogue_completed)
    {
      /* Mark all call-saved registers that we actually used.  */
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
        if (regs_ever_live[i] && !LOCAL_REGNO (i)
            && !TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
          bitmap_set_bit (df->exit_block_uses, i);
    }

#ifdef EH_RETURN_DATA_REGNO
  /* Mark the registers that will contain data for the handler.  */
  if (reload_completed && current_function_calls_eh_return)
    for (i = 0; ; ++i)
      {
        unsigned regno = EH_RETURN_DATA_REGNO (i);
        if (regno == INVALID_REGNUM)
          break;
        bitmap_set_bit (df->exit_block_uses, regno);
      }
#endif

#ifdef EH_RETURN_STACKADJ_RTX
  if ((!HAVE_epilogue || !epilogue_completed)
      && current_function_calls_eh_return)
    {
      rtx tmp = EH_RETURN_STACKADJ_RTX;
      if (tmp && REG_P (tmp))
        df_mark_reg (tmp, df->exit_block_uses);
    }
#endif

#ifdef EH_RETURN_HANDLER_RTX
  if ((!HAVE_epilogue || !epilogue_completed)
      && current_function_calls_eh_return)
    {
      rtx tmp = EH_RETURN_HANDLER_RTX;
      if (tmp && REG_P (tmp))
        df_mark_reg (tmp, df->exit_block_uses);
    }
#endif

  /* Mark the function return value.  */
  diddle_return_value (df_mark_reg, (void*) df->exit_block_uses);

  if (dflow->flags & DF_HARD_REGS)
    EXECUTE_IF_SET_IN_BITMAP (df->exit_block_uses, 0, i, bi)
      df_uses_record (dflow, &regno_reg_rtx[i],
                      DF_REF_REG_USE, EXIT_BLOCK_PTR, NULL,
                      DF_REF_ARTIFICIAL);
}

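/* Illustrative note (not part of the original source):
   diddle_return_value invokes its callback on each hard register that
   can hold the current function's return value, so the df_mark_reg
   call above folds those registers -- including every component of a
   multi-register return -- into exit_block_uses.  */
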
static bool initialized = false;

/* Initialize some platform-specific structures.  */

void
df_hard_reg_init (void)
{
  int i;
#ifdef ELIMINABLE_REGS
  static const struct { const int from, to; } eliminables[] = ELIMINABLE_REGS;
#endif
  /* After reload, some ports add certain bits to regs_ever_live so
     this cannot be reset.  */

  if (!reload_completed)
    memset (regs_ever_live, 0, sizeof (regs_ever_live));

  if (initialized)
    return;

  bitmap_obstack_initialize (&persistent_obstack);

  /* Record which registers will be eliminated.  We use this in
     mark_used_regs.  */
  CLEAR_HARD_REG_SET (elim_reg_set);

#ifdef ELIMINABLE_REGS
  for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
    SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
#else
  SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
#endif

  df_invalidated_by_call = BITMAP_ALLOC (&persistent_obstack);

  /* Inconveniently, this is only readily available in hard reg set
     form.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
    if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
      bitmap_set_bit (df_invalidated_by_call, i);

  initialized = true;
}
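
/* Illustrative note (not part of the original source): df_hard_reg_init
   is safe to call once per function.  The `initialized' guard makes
   the persistent pieces (persistent_obstack, elim_reg_set and
   df_invalidated_by_call) one-time per compiler run, while
   regs_ever_live is re-cleared for each function until reload has
   completed.  */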
