/* Standard problems for dataflow support routines.
   Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.
   Originally contributed by Michael P. Hayes
             (m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
   Major rewrite contributed by Danny Berlin (dberlin@dberlin.org)
             and Kenneth Zadeck (zadeck@naturalbridge.com).

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "insn-config.h"
#include "recog.h"
#include "function.h"
#include "regs.h"
#include "output.h"
#include "alloc-pool.h"
#include "flags.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "sbitmap.h"
#include "bitmap.h"
#include "timevar.h"
#include "df.h"
#include "vecprim.h"
#include "except.h"

#if 0
#define REG_DEAD_DEBUGGING
#endif

#define DF_SPARSE_THRESHOLD 32

static bitmap seen_in_block = NULL;
static bitmap seen_in_insn = NULL;
static void df_ri_dump (struct dataflow *, FILE *);


/*----------------------------------------------------------------------------
   Public access functions for the dataflow problems.
----------------------------------------------------------------------------*/

/* Create a du or ud chain from SRC to DST and link it into SRC.   */

struct df_link *
df_chain_create (struct dataflow *dflow, struct df_ref *src, struct df_ref *dst)
{
  struct df_link *head = DF_REF_CHAIN (src);
  struct df_link *link = pool_alloc (dflow->block_pool);

  DF_REF_CHAIN (src) = link;
  link->next = head;
  link->ref = dst;
  return link;
}


/* Delete a du or ud chain for REF.  If LINK is NULL, delete all
   chains for ref and check to see if the reverse chains can also be
   deleted.  If LINK is not NULL it must be a link off of ref.  In
   this case, the other end is not deleted.  */

void
df_chain_unlink (struct dataflow *dflow, struct df_ref *ref, struct df_link *link)
{
  struct df_link *chain = DF_REF_CHAIN (ref);
  if (link)
    {
      /* Link was the first element in the chain.  */
      if (chain == link)
        DF_REF_CHAIN (ref) = link->next;
      else
        {
          /* Link is an internal element in the chain.  */
          struct df_link *prev = chain;
          while (chain)
            {
              if (chain == link)
                {
                  prev->next = chain->next;
                  break;
                }
              prev = chain;
              chain = chain->next;
            }
        }
      pool_free (dflow->block_pool, link);
    }
  else
    {
      /* If chain is NULL here, it was because of a recursive call
         when the other flavor of chains was not built.  Just run thru
         the entire chain calling the other side and then deleting the
         link.  */
      while (chain)
        {
          struct df_link *next = chain->next;
          /* Delete the other side if it exists.  */
          df_chain_unlink (dflow, chain->ref, chain);
          chain = next;
        }
    }
}


/* Copy the du or ud chain starting at FROM_REF and attach it to
   TO_REF.  */

void
df_chain_copy (struct dataflow *dflow,
               struct df_ref *to_ref,
               struct df_link *from_ref)
{
  while (from_ref)
    {
      df_chain_create (dflow, to_ref, from_ref->ref);
      from_ref = from_ref->next;
    }
}


/* Get the live in set for BB no matter what problem happens to be
   defined.  */

bitmap
df_get_live_in (struct df *df, basic_block bb)
{
  gcc_assert (df->problems_by_index[DF_LR]);

  if (df->problems_by_index[DF_UREC])
    return DF_RA_LIVE_IN (df, bb);
  else if (df->problems_by_index[DF_UR])
    return DF_LIVE_IN (df, bb);
  else
    return DF_UPWARD_LIVE_IN (df, bb);
}


/* Get the live out set for BB no matter what problem happens to be
   defined.  */

bitmap
df_get_live_out (struct df *df, basic_block bb)
{
  gcc_assert (df->problems_by_index[DF_LR]);

  if (df->problems_by_index[DF_UREC])
    return DF_RA_LIVE_OUT (df, bb);
  else if (df->problems_by_index[DF_UR])
    return DF_LIVE_OUT (df, bb);
  else
    return DF_UPWARD_LIVE_OUT (df, bb);
}


/*----------------------------------------------------------------------------
   Utility functions.
----------------------------------------------------------------------------*/

/* Generic versions to get the void* version of the block info.  Only
   used inside the problem instance vectors.  */

/* Grow the bb_info array.  */

void
df_grow_bb_info (struct dataflow *dflow)
{
  unsigned int new_size = last_basic_block + 1;
  if (dflow->block_info_size < new_size)
    {
      new_size += new_size / 4;
      dflow->block_info = xrealloc (dflow->block_info,
                                    new_size *sizeof (void*));
      memset (dflow->block_info + dflow->block_info_size, 0,
              (new_size - dflow->block_info_size) *sizeof (void *));
      dflow->block_info_size = new_size;
    }
}

/* Dump a def-use or use-def chain for REF to FILE.  */

void
df_chain_dump (struct df_link *link, FILE *file)
{
  fprintf (file, "{ ");
  for (; link; link = link->next)
    {
      fprintf (file, "%c%d(bb %d insn %d) ",
               DF_REF_REG_DEF_P (link->ref) ? 'd' : 'u',
               DF_REF_ID (link->ref),
               DF_REF_BBNO (link->ref),
               DF_REF_INSN (link->ref) ? DF_REF_INSN_UID (link->ref) : -1);
    }
  fprintf (file, "}");
}


/* Print some basic block info as part of df_dump.  */

void
df_print_bb_index (basic_block bb, FILE *file)
{
  edge e;
  edge_iterator ei;

  fprintf (file, "( ");
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      basic_block pred = e->src;
      fprintf (file, "%d ", pred->index);
    }
  fprintf (file, ")->[%d]->( ", bb->index);
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      basic_block succ = e->dest;
      fprintf (file, "%d ", succ->index);
    }
  fprintf (file, ")\n");
}


/* Return a bitmap for REGNO from the cache MAPS.  The bitmap is to
   contain COUNT bits starting at START.  These bitmaps are not to be
   changed since there is a cache of them.  */

static inline bitmap
df_ref_bitmap (bitmap *maps, unsigned int regno, int start, int count)
{
  bitmap ids = maps[regno];
  if (!ids)
    {
      unsigned int i;
      unsigned int end = start + count;
      ids = BITMAP_ALLOC (NULL);
      maps[regno] = ids;
      for (i = start; i < end; i++)
        bitmap_set_bit (ids, i);
    }
  return ids;
}


/* Make sure that the seen_in_insn and seen_in_block bitmaps are set
   up correctly. */

static void
df_set_seen (void)
{
  seen_in_block = BITMAP_ALLOC (NULL);
  seen_in_insn = BITMAP_ALLOC (NULL);
}


static void
df_unset_seen (void)
{
  BITMAP_FREE (seen_in_block);
  BITMAP_FREE (seen_in_insn);
}



/*----------------------------------------------------------------------------
   REACHING USES

   Find the locations in the function where each use site for a pseudo
   can reach backwards.  In and out bitvectors are built for each basic
   block.  The id field in the ref is used to index into these sets.
   See df.h for details.

----------------------------------------------------------------------------*/

/* This problem plays a large number of games for the sake of
   efficiency.

   1) The order of the bits in the bitvectors.  After the scanning
   phase, all of the uses are sorted.  All of the uses for reg 0
   are first, followed by all uses for reg 1 and so on.

   2) There are two kill sets, one if the number of uses is less than
   or equal to DF_SPARSE_THRESHOLD and another if it is greater.

   <= : There is a bitmap for each register, use_sites[N], that is
   built on demand.  This bitvector contains a 1 for each use of reg
   N.

   > : One level of indirection is used to keep from generating long
   strings of 1 bits in the kill sets.  Bitvectors that are indexed
   by the regnum are used to represent that there is a killing def
   for the register.  The confluence and transfer functions use
   these along with the bitmap_clear_range call to remove ranges of
   bits without actually generating a knockout vector.

   Both the kill/sparse_kill pair and the
   dense_invalidated_by_call/sparse_invalidated_by_call pair play this
   game.  */
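
/* Illustrative sketch, with hypothetical numbers: suppose reg 7 has four
   uses whose ids are 20..23, i.e. DF_REG_USE_GET (df, 7)->begin == 20 and
   ->n_refs == 4.  Since 4 <= DF_SPARSE_THRESHOLD, a killing def of reg 7
   ORs bits 20..23 (the cached use_sites[7] bitmap) into bb_info->kill.
   Had reg 7 instead had 100 uses, the def would only set bit 7 in
   bb_info->sparse_kill, and the confluence and transfer functions would
   call bitmap_clear_range (tmp, begin, n_refs) to knock the whole range
   out without ever materializing the long string of 1 bits.  */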

/* Private data used to compute the solution for this problem.  These
   data structures are not accessible outside of this module.  */
struct df_ru_problem_data
{

  bitmap *use_sites;            /* Bitmap of uses for each pseudo.  */
  unsigned int use_sites_size;  /* Size of use_sites.  */
  /* The set of defs to regs invalidated by call.  */
  bitmap sparse_invalidated_by_call;
  /* The set of defs to regs invalidated by call for ru.  */
  bitmap dense_invalidated_by_call;
};

/* Get basic block info.  */

struct df_ru_bb_info *
df_ru_get_bb_info (struct dataflow *dflow, unsigned int index)
{
  return (struct df_ru_bb_info *) dflow->block_info[index];
}


/* Set basic block info.  */

static void
df_ru_set_bb_info (struct dataflow *dflow, unsigned int index,
                   struct df_ru_bb_info *bb_info)
{
  dflow->block_info[index] = bb_info;
}


/* Free basic block info.  */

static void
df_ru_free_bb_info (struct dataflow *dflow,
                    basic_block bb ATTRIBUTE_UNUSED,
                    void *vbb_info)
{
  struct df_ru_bb_info *bb_info = (struct df_ru_bb_info *) vbb_info;
  if (bb_info)
    {
      BITMAP_FREE (bb_info->kill);
      BITMAP_FREE (bb_info->sparse_kill);
      BITMAP_FREE (bb_info->gen);
      BITMAP_FREE (bb_info->in);
      BITMAP_FREE (bb_info->out);
      pool_free (dflow->block_pool, bb_info);
    }
}


/* Allocate or reset bitmaps for DFLOW blocks. The solution bits are
   not touched unless the block is new.  */

static void
df_ru_alloc (struct dataflow *dflow,
             bitmap blocks_to_rescan ATTRIBUTE_UNUSED,
             bitmap all_blocks)
{
  unsigned int bb_index;
  bitmap_iterator bi;
  unsigned int reg_size = max_reg_num ();

  if (!dflow->block_pool)
    dflow->block_pool = create_alloc_pool ("df_ru_block pool",
                                           sizeof (struct df_ru_bb_info), 50);

  if (dflow->problem_data)
    {
      unsigned int i;
      struct df_ru_problem_data *problem_data
        = (struct df_ru_problem_data *) dflow->problem_data;

      for (i = 0; i < problem_data->use_sites_size; i++)
        {
          bitmap bm = problem_data->use_sites[i];
          if (bm)
            {
              BITMAP_FREE (bm);
              problem_data->use_sites[i] = NULL;
            }
        }

      if (problem_data->use_sites_size < reg_size)
        {
          problem_data->use_sites
            = xrealloc (problem_data->use_sites, reg_size * sizeof (bitmap));
          memset (problem_data->use_sites + problem_data->use_sites_size, 0,
                  (reg_size - problem_data->use_sites_size) * sizeof (bitmap));
          problem_data->use_sites_size = reg_size;
        }

      bitmap_clear (problem_data->sparse_invalidated_by_call);
      bitmap_clear (problem_data->dense_invalidated_by_call);
    }
  else
    {
      struct df_ru_problem_data *problem_data = XNEW (struct df_ru_problem_data);
      dflow->problem_data = problem_data;

      problem_data->use_sites = XCNEWVEC (bitmap, reg_size);
      problem_data->use_sites_size = reg_size;
      problem_data->sparse_invalidated_by_call = BITMAP_ALLOC (NULL);
      problem_data->dense_invalidated_by_call = BITMAP_ALLOC (NULL);
    }

  df_grow_bb_info (dflow);

  /* Because of the clustering of all def sites for the same pseudo,
     we have to process all of the blocks before doing the
     analysis.  */

  EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
    {
      struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, bb_index);
      if (bb_info)
        {
          bitmap_clear (bb_info->kill);
          bitmap_clear (bb_info->sparse_kill);
          bitmap_clear (bb_info->gen);
        }
      else
        {
          bb_info = (struct df_ru_bb_info *) pool_alloc (dflow->block_pool);
          df_ru_set_bb_info (dflow, bb_index, bb_info);
          bb_info->kill = BITMAP_ALLOC (NULL);
          bb_info->sparse_kill = BITMAP_ALLOC (NULL);
          bb_info->gen = BITMAP_ALLOC (NULL);
          bb_info->in = BITMAP_ALLOC (NULL);
          bb_info->out = BITMAP_ALLOC (NULL);
        }
    }
}


/* Process a list of DEFs for df_ru_bb_local_compute.  */

static void
df_ru_bb_local_compute_process_def (struct dataflow *dflow,
                                    struct df_ru_bb_info *bb_info,
                                    struct df_ref *def,
                                    enum df_ref_flags top_flag)
{
  struct df *df = dflow->df;
  while (def)
    {
      if ((top_flag == (DF_REF_FLAGS (def) & DF_REF_AT_TOP))
          /* If the def is to only part of the reg, it is as if it did
             not happen, since some of the bits may get thru.  */
          && (!(DF_REF_FLAGS (def) & DF_REF_PARTIAL)))
        {
          unsigned int regno = DF_REF_REGNO (def);
          unsigned int begin = DF_REG_USE_GET (df, regno)->begin;
          unsigned int n_uses = DF_REG_USE_GET (df, regno)->n_refs;
          if (!bitmap_bit_p (seen_in_block, regno))
            {
              /* The first def for regno in the insn causes the kill
                 info to be generated.  Do not modify the gen set
                 because the only values in it are the uses from here
                 to the top of the block and this def does not affect
                 them.  */
              if (!bitmap_bit_p (seen_in_insn, regno))
                {
                  if (n_uses > DF_SPARSE_THRESHOLD)
                    bitmap_set_bit (bb_info->sparse_kill, regno);
                  else
                    {
                      struct df_ru_problem_data * problem_data
                        = (struct df_ru_problem_data *)dflow->problem_data;
                      bitmap uses
                        = df_ref_bitmap (problem_data->use_sites, regno,
                                       begin, n_uses);
                      bitmap_ior_into (bb_info->kill, uses);
                    }
                }
              bitmap_set_bit (seen_in_insn, regno);
            }
        }
      def = def->next_ref;
    }
}


/* Process a list of USEs for df_ru_bb_local_compute.  */

static void
df_ru_bb_local_compute_process_use (struct df_ru_bb_info *bb_info,
                                    struct df_ref *use,
                                    enum df_ref_flags top_flag)
{
  while (use)
    {
      if (top_flag == (DF_REF_FLAGS (use) & DF_REF_AT_TOP))
        {
          /* Add use to set of gens in this BB unless we have seen a
             def in a previous instruction.  */
          unsigned int regno = DF_REF_REGNO (use);
          if (!bitmap_bit_p (seen_in_block, regno))
            bitmap_set_bit (bb_info->gen, DF_REF_ID (use));
        }
      use = use->next_ref;
    }
}

/* Compute local reaching use (upward exposed use) info for basic
   block BB.  USE_INFO->REGS[R] caches the set of uses for register R.  */
static void
df_ru_bb_local_compute (struct dataflow *dflow, unsigned int bb_index)
{
  struct df *df = dflow->df;
  basic_block bb = BASIC_BLOCK (bb_index);
  struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, bb_index);
  rtx insn;

  /* Set when a def for regno is seen.  */
  bitmap_clear (seen_in_block);
  bitmap_clear (seen_in_insn);

#ifdef EH_USES
  /* Variables defined in the prolog that are used by the exception
     handler.  */
  df_ru_bb_local_compute_process_use (bb_info,
                                      df_get_artificial_uses (df, bb_index),
                                      DF_REF_AT_TOP);
#endif
  df_ru_bb_local_compute_process_def (dflow, bb_info,
                                      df_get_artificial_defs (df, bb_index),
                                      DF_REF_AT_TOP);

  FOR_BB_INSNS (bb, insn)
    {
      unsigned int uid = INSN_UID (insn);
      if (!INSN_P (insn))
        continue;

      df_ru_bb_local_compute_process_use (bb_info,
                                          DF_INSN_UID_USES (df, uid), 0);

      df_ru_bb_local_compute_process_def (dflow, bb_info,
                                          DF_INSN_UID_DEFS (df, uid), 0);

      bitmap_ior_into (seen_in_block, seen_in_insn);
      bitmap_clear (seen_in_insn);
    }

  /* Process the hardware registers that are always live.  */
  df_ru_bb_local_compute_process_use (bb_info,
                                      df_get_artificial_uses (df, bb_index), 0);

  df_ru_bb_local_compute_process_def (dflow, bb_info,
                                      df_get_artificial_defs (df, bb_index), 0);
}


/* Compute local reaching use (upward exposed use) info for each basic
   block within BLOCKS.  */
static void
df_ru_local_compute (struct dataflow *dflow,
                     bitmap all_blocks,
                     bitmap rescan_blocks  ATTRIBUTE_UNUSED)
{
  struct df *df = dflow->df;
  unsigned int bb_index;
  bitmap_iterator bi;
  unsigned int regno;
  struct df_ru_problem_data *problem_data
    = (struct df_ru_problem_data *) dflow->problem_data;
  bitmap sparse_invalidated = problem_data->sparse_invalidated_by_call;
  bitmap dense_invalidated = problem_data->dense_invalidated_by_call;

  df_set_seen ();

  if (!df->use_info.refs_organized)
    df_reorganize_refs (&df->use_info);

  EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
    {
      df_ru_bb_local_compute (dflow, bb_index);
    }

  /* Set up the knockout bit vectors to be applied across EH_EDGES.  */
  EXECUTE_IF_SET_IN_BITMAP (df_invalidated_by_call, 0, regno, bi)
    {
      struct df_reg_info *reg_info = DF_REG_USE_GET (df, regno);
      if (reg_info->n_refs > DF_SPARSE_THRESHOLD)
        bitmap_set_bit (sparse_invalidated, regno);
      else
        {
          bitmap defs = df_ref_bitmap (problem_data->use_sites, regno,
                                       reg_info->begin, reg_info->n_refs);
          bitmap_ior_into (dense_invalidated, defs);
        }
    }

  df_unset_seen ();
}


/* Initialize the solution bit vectors for problem.  */

static void
df_ru_init_solution (struct dataflow *dflow, bitmap all_blocks)
{
  unsigned int bb_index;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
    {
      struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, bb_index);
      bitmap_copy (bb_info->in, bb_info->gen);
      bitmap_clear (bb_info->out);
    }
}


/* Out of target gets or of in of source.  */

static void
df_ru_confluence_n (struct dataflow *dflow, edge e)
{
  bitmap op1 = df_ru_get_bb_info (dflow, e->src->index)->out;
  bitmap op2 = df_ru_get_bb_info (dflow, e->dest->index)->in;

  if (e->flags & EDGE_EH)
    {
      struct df_ru_problem_data *problem_data
        = (struct df_ru_problem_data *) dflow->problem_data;
      bitmap sparse_invalidated = problem_data->sparse_invalidated_by_call;
      bitmap dense_invalidated = problem_data->dense_invalidated_by_call;
      struct df *df = dflow->df;
      bitmap_iterator bi;
      unsigned int regno;
      bitmap tmp = BITMAP_ALLOC (NULL);

      bitmap_copy (tmp, op2);
      bitmap_and_compl_into (tmp, dense_invalidated);

      EXECUTE_IF_SET_IN_BITMAP (sparse_invalidated, 0, regno, bi)
        {
          bitmap_clear_range (tmp,
                              DF_REG_USE_GET (df, regno)->begin,
                              DF_REG_USE_GET (df, regno)->n_refs);
        }
      bitmap_ior_into (op1, tmp);
      BITMAP_FREE (tmp);
    }
  else
    bitmap_ior_into (op1, op2);
}


/* Transfer function.  */

static bool
df_ru_transfer_function (struct dataflow *dflow, int bb_index)
{
  struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, bb_index);
  unsigned int regno;
  bitmap_iterator bi;
  bitmap in = bb_info->in;
  bitmap out = bb_info->out;
  bitmap gen = bb_info->gen;
  bitmap kill = bb_info->kill;
  bitmap sparse_kill = bb_info->sparse_kill;

  if (bitmap_empty_p (sparse_kill))
    return  bitmap_ior_and_compl (in, gen, out, kill);
  else
    {
      struct df *df = dflow->df;
      bool changed = false;
      bitmap tmp = BITMAP_ALLOC (NULL);
      bitmap_copy (tmp, out);
      EXECUTE_IF_SET_IN_BITMAP (sparse_kill, 0, regno, bi)
        {
          bitmap_clear_range (tmp,
                              DF_REG_USE_GET (df, regno)->begin,
                              DF_REG_USE_GET (df, regno)->n_refs);
        }
      bitmap_and_compl_into (tmp, kill);
      bitmap_ior_into (tmp, gen);
      changed = !bitmap_equal_p (tmp, in);
      if (changed)
        {
          BITMAP_FREE (in);
          bb_info->in = tmp;
        }
      else
        BITMAP_FREE (tmp);
      return changed;
    }
}


/* Free all storage associated with the problem.  */

static void
df_ru_free (struct dataflow *dflow)
{
  unsigned int i;
  struct df_ru_problem_data *problem_data
    = (struct df_ru_problem_data *) dflow->problem_data;

  if (problem_data)
    {
      for (i = 0; i < dflow->block_info_size; i++)
        {
          struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, i);
          if (bb_info)
            {
              BITMAP_FREE (bb_info->kill);
              BITMAP_FREE (bb_info->sparse_kill);
              BITMAP_FREE (bb_info->gen);
              BITMAP_FREE (bb_info->in);
              BITMAP_FREE (bb_info->out);
            }
        }

      free_alloc_pool (dflow->block_pool);

      for (i = 0; i < problem_data->use_sites_size; i++)
        {
          bitmap bm = problem_data->use_sites[i];
          if (bm)
            BITMAP_FREE (bm);
        }

      free (problem_data->use_sites);
      BITMAP_FREE (problem_data->sparse_invalidated_by_call);
      BITMAP_FREE (problem_data->dense_invalidated_by_call);

      dflow->block_info_size = 0;
      free (dflow->block_info);
      free (dflow->problem_data);
    }
  free (dflow);
}


/* Debugging info.  */

static void
df_ru_dump (struct dataflow *dflow, FILE *file)
{
  basic_block bb;
  struct df *df = dflow->df;
  struct df_ru_problem_data *problem_data
    = (struct df_ru_problem_data *) dflow->problem_data;
  unsigned int m = max_reg_num ();
  unsigned int regno;

  if (!dflow->block_info)
    return;

  fprintf (file, "Reaching uses:\n");

  fprintf (file, "  sparse invalidated \t");
  dump_bitmap (file, problem_data->sparse_invalidated_by_call);
  fprintf (file, "  dense invalidated \t");
  dump_bitmap (file, problem_data->dense_invalidated_by_call);

  for (regno = 0; regno < m; regno++)
    if (DF_REG_USE_GET (df, regno)->n_refs)
      fprintf (file, "%d[%d,%d] ", regno,
               DF_REG_USE_GET (df, regno)->begin,
               DF_REG_USE_GET (df, regno)->n_refs);
  fprintf (file, "\n");

  FOR_ALL_BB (bb)
    {
      struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, bb->index);
      df_print_bb_index (bb, file);

      if (!bb_info->in)
        continue;

      fprintf (file, "  in  \t(%d)\n", (int) bitmap_count_bits (bb_info->in));
      dump_bitmap (file, bb_info->in);
      fprintf (file, "  gen \t(%d)\n", (int) bitmap_count_bits (bb_info->gen));
      dump_bitmap (file, bb_info->gen);
      fprintf (file, "  kill\t(%d)\n", (int) bitmap_count_bits (bb_info->kill));
      dump_bitmap (file, bb_info->kill);
      fprintf (file, "  out \t(%d)\n", (int) bitmap_count_bits (bb_info->out));
      dump_bitmap (file, bb_info->out);
    }
}

/* All of the information associated with every instance of the problem.  */

static struct df_problem problem_RU =
{
  DF_RU,                      /* Problem id.  */
  DF_BACKWARD,                /* Direction.  */
  df_ru_alloc,                /* Allocate the problem specific data.  */
  NULL,                       /* Reset global information.  */
  df_ru_free_bb_info,         /* Free basic block info.  */
  df_ru_local_compute,        /* Local compute function.  */
  df_ru_init_solution,        /* Init the solution specific data.  */
  df_iterative_dataflow,      /* Iterative solver.  */
  NULL,                       /* Confluence operator 0.  */
  df_ru_confluence_n,         /* Confluence operator n.  */
  df_ru_transfer_function,    /* Transfer function.  */
  NULL,                       /* Finalize function.  */
  df_ru_free,                 /* Free all of the problem information.  */
  df_ru_dump,                 /* Debugging.  */
  NULL,                       /* Dependent problem.  */

};



/* Create a new DATAFLOW instance and add it to an existing instance
   of DF.  The returned structure is what is used to get at the
   solution.  */

struct dataflow *
df_ru_add_problem (struct df *df, int flags)
{
  return df_add_problem (df, &problem_RU, flags);
}


/*----------------------------------------------------------------------------
   REACHING DEFINITIONS

   Find the locations in the function where each definition site for a
   pseudo reaches.  In and out bitvectors are built for each basic
   block.  The id field in the ref is used to index into these sets.
   See df.h for details.
   ----------------------------------------------------------------------------*/

/* See the comment at the top of the Reaching Uses problem for how the
   uses are represented in the kill sets. The same games are played
   here for the defs.  */

/* Private data used to compute the solution for this problem.  These
   data structures are not accessible outside of this module.  */
struct df_rd_problem_data
{
  /* If the number of defs for regnum N is less than
     DF_SPARSE_THRESHOLD, def_sites[N] contains a mask of all of
     the defs of reg N indexed by the id in the ref structure.  If
     there are more than DF_SPARSE_THRESHOLD defs for regnum N a
     different mechanism is used to mask the def.  */
  bitmap *def_sites;            /* Bitmap of defs for each pseudo.  */
  unsigned int def_sites_size;  /* Size of def_sites.  */
  /* The set of defs to regs invalidated by call.  */
  bitmap sparse_invalidated_by_call;
  /* The set of defs to regs invalidated by call for rd.  */
  bitmap dense_invalidated_by_call;
};

/* Get basic block info.  */

struct df_rd_bb_info *
df_rd_get_bb_info (struct dataflow *dflow, unsigned int index)
{
  return (struct df_rd_bb_info *) dflow->block_info[index];
}


/* Set basic block info.  */

static void
df_rd_set_bb_info (struct dataflow *dflow, unsigned int index,
                   struct df_rd_bb_info *bb_info)
{
  dflow->block_info[index] = bb_info;
}


/* Free basic block info.  */

static void
df_rd_free_bb_info (struct dataflow *dflow,
                    basic_block bb ATTRIBUTE_UNUSED,
                    void *vbb_info)
{
  struct df_rd_bb_info *bb_info = (struct df_rd_bb_info *) vbb_info;
  if (bb_info)
    {
      BITMAP_FREE (bb_info->kill);
      BITMAP_FREE (bb_info->sparse_kill);
      BITMAP_FREE (bb_info->gen);
      BITMAP_FREE (bb_info->in);
      BITMAP_FREE (bb_info->out);
      pool_free (dflow->block_pool, bb_info);
    }
}


/* Allocate or reset bitmaps for DFLOW blocks. The solution bits are
   not touched unless the block is new.  */

static void
df_rd_alloc (struct dataflow *dflow,
             bitmap blocks_to_rescan ATTRIBUTE_UNUSED,
             bitmap all_blocks)
{
  unsigned int bb_index;
  bitmap_iterator bi;
  unsigned int reg_size = max_reg_num ();

  if (!dflow->block_pool)
    dflow->block_pool = create_alloc_pool ("df_rd_block pool",
                                           sizeof (struct df_rd_bb_info), 50);

  if (dflow->problem_data)
    {
      unsigned int i;
      struct df_rd_problem_data *problem_data
        = (struct df_rd_problem_data *) dflow->problem_data;

      for (i = 0; i < problem_data->def_sites_size; i++)
        {
          bitmap bm = problem_data->def_sites[i];
          if (bm)
            {
              BITMAP_FREE (bm);
              problem_data->def_sites[i] = NULL;
            }
        }

      if (problem_data->def_sites_size < reg_size)
        {
          problem_data->def_sites
            = xrealloc (problem_data->def_sites, reg_size *sizeof (bitmap));
          memset (problem_data->def_sites + problem_data->def_sites_size, 0,
                  (reg_size - problem_data->def_sites_size) *sizeof (bitmap));
          problem_data->def_sites_size = reg_size;
        }

      bitmap_clear (problem_data->sparse_invalidated_by_call);
      bitmap_clear (problem_data->dense_invalidated_by_call);
    }
  else
    {
      struct df_rd_problem_data *problem_data = XNEW (struct df_rd_problem_data);
      dflow->problem_data = problem_data;

      problem_data->def_sites = XCNEWVEC (bitmap, reg_size);
      problem_data->def_sites_size = reg_size;
      problem_data->sparse_invalidated_by_call = BITMAP_ALLOC (NULL);
      problem_data->dense_invalidated_by_call = BITMAP_ALLOC (NULL);
    }

  df_grow_bb_info (dflow);

  /* Because of the clustering of all use sites for the same pseudo,
     we have to process all of the blocks before doing the
     analysis.  */

  EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
    {
      struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, bb_index);
      if (bb_info)
        {
          bitmap_clear (bb_info->kill);
          bitmap_clear (bb_info->sparse_kill);
          bitmap_clear (bb_info->gen);
        }
      else
        {
          bb_info = (struct df_rd_bb_info *) pool_alloc (dflow->block_pool);
          df_rd_set_bb_info (dflow, bb_index, bb_info);
          bb_info->kill = BITMAP_ALLOC (NULL);
          bb_info->sparse_kill = BITMAP_ALLOC (NULL);
          bb_info->gen = BITMAP_ALLOC (NULL);
          bb_info->in = BITMAP_ALLOC (NULL);
          bb_info->out = BITMAP_ALLOC (NULL);
        }
    }
}


/* Process a list of DEFs for df_rd_bb_local_compute.  */

static void
df_rd_bb_local_compute_process_def (struct dataflow *dflow,
                                    struct df_rd_bb_info *bb_info,
                                    struct df_ref *def,
                                    enum df_ref_flags top_flag)
{
  struct df *df = dflow->df;
  while (def)
    {
      if (top_flag == (DF_REF_FLAGS (def) & DF_REF_AT_TOP))
        {
          unsigned int regno = DF_REF_REGNO (def);
          unsigned int begin = DF_REG_DEF_GET (df, regno)->begin;
          unsigned int n_defs = DF_REG_DEF_GET (df, regno)->n_refs;

          /* Only the last def(s) for a regno in the block has any
             effect.  */
          if (!bitmap_bit_p (seen_in_block, regno))
            {
              /* The first def for regno in insn gets to knock out the
                 defs from other instructions.  */
              if ((!bitmap_bit_p (seen_in_insn, regno))
                  /* If the def is to only part of the reg, it does
                     not kill the other defs that reach here.  */
                  && (!((DF_REF_FLAGS (def) & DF_REF_PARTIAL)
                         || (DF_REF_FLAGS (def) & DF_REF_MAY_CLOBBER))))
                {
                  if (n_defs > DF_SPARSE_THRESHOLD)
                    {
                      bitmap_set_bit (bb_info->sparse_kill, regno);
                      bitmap_clear_range (bb_info->gen, begin, n_defs);
                    }
                  else
                    {
                      struct df_rd_problem_data * problem_data
                        = (struct df_rd_problem_data *)dflow->problem_data;
                      bitmap defs = df_ref_bitmap (problem_data->def_sites,
                                                   regno, begin, n_defs);
                      bitmap_ior_into (bb_info->kill, defs);
                      bitmap_and_compl_into (bb_info->gen, defs);
                    }
                }

              bitmap_set_bit (seen_in_insn, regno);
              /* All defs for regno in the instruction may be put into
                 the gen set.  */
              if (!(DF_REF_FLAGS (def)
                     & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
                bitmap_set_bit (bb_info->gen, DF_REF_ID (def));
            }
        }
      def = def->next_ref;
    }
}

/* Compute local reaching def info for basic block BB.  */

static void
df_rd_bb_local_compute (struct dataflow *dflow, unsigned int bb_index)
{
  struct df *df = dflow->df;
  basic_block bb = BASIC_BLOCK (bb_index);
  struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, bb_index);
  rtx insn;

  bitmap_clear (seen_in_block);
  bitmap_clear (seen_in_insn);

  df_rd_bb_local_compute_process_def (dflow, bb_info,
                                      df_get_artificial_defs (df, bb_index), 0);

  FOR_BB_INSNS_REVERSE (bb, insn)
    {
      unsigned int uid = INSN_UID (insn);

      if (!INSN_P (insn))
        continue;

      df_rd_bb_local_compute_process_def (dflow, bb_info,
                                          DF_INSN_UID_DEFS (df, uid), 0);

      /* This complex dance with the two bitmaps is required because
         instructions can assign twice to the same pseudo.  This
         generally happens with calls that will have one def for the
         result and another def for the clobber.  If only one vector
         is used and the clobber goes first, the result will be
         lost.  */
      bitmap_ior_into (seen_in_block, seen_in_insn);
      bitmap_clear (seen_in_insn);
    }

  /* Process the artificial defs at the top of the block last since we
     are going backwards through the block and these are logically at
     the start.  */
  df_rd_bb_local_compute_process_def (dflow, bb_info,
                                      df_get_artificial_defs (df, bb_index),
                                      DF_REF_AT_TOP);
}


/* Compute local reaching def info for each basic block within BLOCKS.  */

static void
df_rd_local_compute (struct dataflow *dflow,
                     bitmap all_blocks,
                     bitmap rescan_blocks  ATTRIBUTE_UNUSED)
{
  struct df *df = dflow->df;
  unsigned int bb_index;
  bitmap_iterator bi;
  unsigned int regno;
  struct df_rd_problem_data *problem_data
    = (struct df_rd_problem_data *) dflow->problem_data;
  bitmap sparse_invalidated = problem_data->sparse_invalidated_by_call;
  bitmap dense_invalidated = problem_data->dense_invalidated_by_call;

  df_set_seen ();

  if (!df->def_info.refs_organized)
    df_reorganize_refs (&df->def_info);

  EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
    {
      df_rd_bb_local_compute (dflow, bb_index);
    }

  /* Set up the knockout bit vectors to be applied across EH_EDGES.  */
  EXECUTE_IF_SET_IN_BITMAP (df_invalidated_by_call, 0, regno, bi)
    {
      struct df_reg_info *reg_info = DF_REG_DEF_GET (df, regno);
      if (reg_info->n_refs > DF_SPARSE_THRESHOLD)
        {
          bitmap_set_bit (sparse_invalidated, regno);
        }
      else
        {
          bitmap defs = df_ref_bitmap (problem_data->def_sites, regno,
                                       reg_info->begin, reg_info->n_refs);
          bitmap_ior_into (dense_invalidated, defs);
        }
    }
  df_unset_seen ();
}


/* Initialize the solution bit vectors for problem.  */

static void
df_rd_init_solution (struct dataflow *dflow, bitmap all_blocks)
{
  unsigned int bb_index;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
    {
      struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, bb_index);

      bitmap_copy (bb_info->out, bb_info->gen);
      bitmap_clear (bb_info->in);
    }
}

/* In of target gets or of out of source.  */

static void
df_rd_confluence_n (struct dataflow *dflow, edge e)
{
  bitmap op1 = df_rd_get_bb_info (dflow, e->dest->index)->in;
  bitmap op2 = df_rd_get_bb_info (dflow, e->src->index)->out;

  if (e->flags & EDGE_EH)
    {
      struct df_rd_problem_data *problem_data
        = (struct df_rd_problem_data *) dflow->problem_data;
      bitmap sparse_invalidated = problem_data->sparse_invalidated_by_call;
      bitmap dense_invalidated = problem_data->dense_invalidated_by_call;
      struct df *df = dflow->df;
      bitmap_iterator bi;
      unsigned int regno;
      bitmap tmp = BITMAP_ALLOC (NULL);

      bitmap_copy (tmp, op2);
      bitmap_and_compl_into (tmp, dense_invalidated);

      EXECUTE_IF_SET_IN_BITMAP (sparse_invalidated, 0, regno, bi)
        {
          bitmap_clear_range (tmp,
                              DF_REG_DEF_GET (df, regno)->begin,
                              DF_REG_DEF_GET (df, regno)->n_refs);
        }
      bitmap_ior_into (op1, tmp);
      BITMAP_FREE (tmp);
    }
  else
    bitmap_ior_into (op1, op2);
}


/* Transfer function.  */

static bool
df_rd_transfer_function (struct dataflow *dflow, int bb_index)
{
  struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, bb_index);
  unsigned int regno;
  bitmap_iterator bi;
  bitmap in = bb_info->in;
  bitmap out = bb_info->out;
  bitmap gen = bb_info->gen;
  bitmap kill = bb_info->kill;
  bitmap sparse_kill = bb_info->sparse_kill;

  if (bitmap_empty_p (sparse_kill))
    return  bitmap_ior_and_compl (out, gen, in, kill);
  else
    {
      struct df *df = dflow->df;
      bool changed = false;
      bitmap tmp = BITMAP_ALLOC (NULL);
      bitmap_copy (tmp, in);
      EXECUTE_IF_SET_IN_BITMAP (sparse_kill, 0, regno, bi)
        {
          bitmap_clear_range (tmp,
                              DF_REG_DEF_GET (df, regno)->begin,
                              DF_REG_DEF_GET (df, regno)->n_refs);
        }
      bitmap_and_compl_into (tmp, kill);
      bitmap_ior_into (tmp, gen);
      changed = !bitmap_equal_p (tmp, out);
      if (changed)
        {
          BITMAP_FREE (out);
          bb_info->out = tmp;
        }
      else
        BITMAP_FREE (tmp);
      return changed;
    }
}
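
/* In equation form, the transfer function above computes
   OUT(bb) = GEN(bb) | (IN(bb) & ~KILL(bb)).  The sparse path differs only
   in that, for every regno in sparse_kill, the whole id range
   [begin, begin + n_refs) of that register's defs is first cleared from a
   temporary copy of IN, which avoids keeping a dense KILL bitmap for
   registers with many defs.  */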


/* Free all storage associated with the problem.  */

static void
df_rd_free (struct dataflow *dflow)
{
  unsigned int i;
  struct df_rd_problem_data *problem_data
    = (struct df_rd_problem_data *) dflow->problem_data;

  if (problem_data)
    {
      for (i = 0; i < dflow->block_info_size; i++)
        {
          struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, i);
          if (bb_info)
            {
              BITMAP_FREE (bb_info->kill);
              BITMAP_FREE (bb_info->sparse_kill);
              BITMAP_FREE (bb_info->gen);
              BITMAP_FREE (bb_info->in);
              BITMAP_FREE (bb_info->out);
            }
        }

      free_alloc_pool (dflow->block_pool);

      for (i = 0; i < problem_data->def_sites_size; i++)
        {
          bitmap bm = problem_data->def_sites[i];
          if (bm)
            BITMAP_FREE (bm);
        }

      free (problem_data->def_sites);
      BITMAP_FREE (problem_data->sparse_invalidated_by_call);
      BITMAP_FREE (problem_data->dense_invalidated_by_call);

      dflow->block_info_size = 0;
      free (dflow->block_info);
      free (dflow->problem_data);
    }
  free (dflow);
}


/* Debugging info.  */

static void
df_rd_dump (struct dataflow *dflow, FILE *file)
{
  struct df *df = dflow->df;
  basic_block bb;
  struct df_rd_problem_data *problem_data
    = (struct df_rd_problem_data *) dflow->problem_data;
  unsigned int m = max_reg_num ();
  unsigned int regno;

  if (!dflow->block_info)
    return;

  fprintf (file, "Reaching defs:\n\n");

  fprintf (file, "  sparse invalidated \t");
  dump_bitmap (file, problem_data->sparse_invalidated_by_call);
  fprintf (file, "  dense invalidated \t");
  dump_bitmap (file, problem_data->dense_invalidated_by_call);

  for (regno = 0; regno < m; regno++)
    if (DF_REG_DEF_GET (df, regno)->n_refs)
      fprintf (file, "%d[%d,%d] ", regno,
               DF_REG_DEF_GET (df, regno)->begin,
               DF_REG_DEF_GET (df, regno)->n_refs);
  fprintf (file, "\n");

  FOR_ALL_BB (bb)
    {
      struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, bb->index);
      df_print_bb_index (bb, file);

      if (!bb_info->in)
        continue;

      fprintf (file, "  in  \t(%d)\n", (int) bitmap_count_bits (bb_info->in));
      dump_bitmap (file, bb_info->in);
      fprintf (file, "  gen \t(%d)\n", (int) bitmap_count_bits (bb_info->gen));
      dump_bitmap (file, bb_info->gen);
      fprintf (file, "  kill\t(%d)\n", (int) bitmap_count_bits (bb_info->kill));
      dump_bitmap (file, bb_info->kill);
      fprintf (file, "  out \t(%d)\n", (int) bitmap_count_bits (bb_info->out));
      dump_bitmap (file, bb_info->out);
    }
}

/* All of the information associated with every instance of the problem.  */

static struct df_problem problem_RD =
{
  DF_RD,                      /* Problem id.  */
  DF_FORWARD,                 /* Direction.  */
  df_rd_alloc,                /* Allocate the problem specific data.  */
  NULL,                       /* Reset global information.  */
  df_rd_free_bb_info,         /* Free basic block info.  */
  df_rd_local_compute,        /* Local compute function.  */
  df_rd_init_solution,        /* Init the solution specific data.  */
  df_iterative_dataflow,      /* Iterative solver.  */
  NULL,                       /* Confluence operator 0.  */
  df_rd_confluence_n,         /* Confluence operator n.  */
  df_rd_transfer_function,    /* Transfer function.  */
  NULL,                       /* Finalize function.  */
  df_rd_free,                 /* Free all of the problem information.  */
  df_rd_dump,                 /* Debugging.  */
  NULL,                       /* Dependent problem.  */

};



/* Create a new DATAFLOW instance and add it to an existing instance
   of DF.  The returned structure is what is used to get at the
   solution.  */

struct dataflow *
df_rd_add_problem (struct df *df, int flags)
{
  return df_add_problem (df, &problem_RD, flags);
}



/*----------------------------------------------------------------------------
   LIVE REGISTERS

   Find the locations in the function where any use of a pseudo can
   reach in the backwards direction.  In and out bitvectors are built
   for each basic block.  The regnum is used to index into these sets.
   See df.h for details.
   ----------------------------------------------------------------------------*/

/* Get basic block info.  */

struct df_lr_bb_info *
df_lr_get_bb_info (struct dataflow *dflow, unsigned int index)
{
  return (struct df_lr_bb_info *) dflow->block_info[index];
}


/* Set basic block info.  */

static void
df_lr_set_bb_info (struct dataflow *dflow, unsigned int index,
                   struct df_lr_bb_info *bb_info)
{
  dflow->block_info[index] = bb_info;
}


/* Free basic block info.  */

static void
df_lr_free_bb_info (struct dataflow *dflow,
                    basic_block bb ATTRIBUTE_UNUSED,
                    void *vbb_info)
{
  struct df_lr_bb_info *bb_info = (struct df_lr_bb_info *) vbb_info;
  if (bb_info)
    {
      BITMAP_FREE (bb_info->use);
      BITMAP_FREE (bb_info->def);
      BITMAP_FREE (bb_info->in);
      BITMAP_FREE (bb_info->out);
      pool_free (dflow->block_pool, bb_info);
    }
}


/* Allocate or reset bitmaps for DFLOW blocks. The solution bits are
   not touched unless the block is new.  */

static void
df_lr_alloc (struct dataflow *dflow, bitmap blocks_to_rescan,
             bitmap all_blocks ATTRIBUTE_UNUSED)
{
  unsigned int bb_index;
  bitmap_iterator bi;

  if (!dflow->block_pool)
    dflow->block_pool = create_alloc_pool ("df_lr_block pool",
                                           sizeof (struct df_lr_bb_info), 50);

  df_grow_bb_info (dflow);

  EXECUTE_IF_SET_IN_BITMAP (blocks_to_rescan, 0, bb_index, bi)
    {
      struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, bb_index);
      if (bb_info)
        {
          bitmap_clear (bb_info->def);
          bitmap_clear (bb_info->use);
        }
      else
        {
          bb_info = (struct df_lr_bb_info *) pool_alloc (dflow->block_pool);
          df_lr_set_bb_info (dflow, bb_index, bb_info);
          bb_info->use = BITMAP_ALLOC (NULL);
          bb_info->def = BITMAP_ALLOC (NULL);
          bb_info->in = BITMAP_ALLOC (NULL);
          bb_info->out = BITMAP_ALLOC (NULL);
        }
    }
}


/* Compute local live register info for basic block BB.  */

static void
df_lr_bb_local_compute (struct dataflow *dflow,
                        struct df *df, unsigned int bb_index)
{
  basic_block bb = BASIC_BLOCK (bb_index);
  struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, bb_index);
  rtx insn;
  struct df_ref *def;
  struct df_ref *use;

  /* Process the registers set in an exception handler.  */
  for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
    if (((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
        && (!(DF_REF_FLAGS (def) & DF_REF_PARTIAL)))
      {
        unsigned int dregno = DF_REF_REGNO (def);
        bitmap_set_bit (bb_info->def, dregno);
        bitmap_clear_bit (bb_info->use, dregno);
      }

  /* Process the hardware registers that are always live.  */
  for (use = df_get_artificial_uses (df, bb_index); use; use = use->next_ref)
    /* Add use to set of uses in this BB.  */
    if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
      bitmap_set_bit (bb_info->use, DF_REF_REGNO (use));

  FOR_BB_INSNS_REVERSE (bb, insn)
    {
      unsigned int uid = INSN_UID (insn);

      if (!INSN_P (insn))
        continue;

      if (CALL_P (insn))
        {
          for (def = DF_INSN_UID_DEFS (df, uid); def; def = def->next_ref)
            {
              unsigned int dregno = DF_REF_REGNO (def);

              if (dregno >= FIRST_PSEUDO_REGISTER
                  || !(SIBLING_CALL_P (insn)
                       && bitmap_bit_p (df->exit_block_uses, dregno)
                       && !refers_to_regno_p (dregno, dregno+1,
                                              current_function_return_rtx,
                                              (rtx *)0)))
                {
                  /* If the def is to only part of the reg, it does
                     not kill the other defs that reach here.  */
                  if (!(DF_REF_FLAGS (def) & DF_REF_PARTIAL))
                    {
                      bitmap_set_bit (bb_info->def, dregno);
                      bitmap_clear_bit (bb_info->use, dregno);
                    }
                }
            }
        }
      else
        {
          for (def = DF_INSN_UID_DEFS (df, uid); def; def = def->next_ref)
            {
              unsigned int dregno = DF_REF_REGNO (def);

              if (DF_INSN_CONTAINS_ASM (df, insn)
                  && dregno < FIRST_PSEUDO_REGISTER)
                {
                  unsigned int i;
                  unsigned int end = dregno
                    + hard_regno_nregs[dregno][GET_MODE (DF_REF_REG (def))] - 1;
                  for (i = dregno; i <= end; ++i)
                    regs_asm_clobbered[i] = 1;
                }
              /* If the def is to only part of the reg, it does
                 not kill the other defs that reach here.  */
              if (!(DF_REF_FLAGS (def) & DF_REF_PARTIAL))
                {
                  bitmap_set_bit (bb_info->def, dregno);
                  bitmap_clear_bit (bb_info->use, dregno);
                }
            }
        }

      for (use = DF_INSN_UID_USES (df, uid); use; use = use->next_ref)
        /* Add use to set of uses in this BB.  */
        bitmap_set_bit (bb_info->use, DF_REF_REGNO (use));
    }

  /* Process the registers set in an exception handler.  */
  for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
    if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP)
        && (!(DF_REF_FLAGS (def) & DF_REF_PARTIAL)))
      {
        unsigned int dregno = DF_REF_REGNO (def);
        bitmap_set_bit (bb_info->def, dregno);
        bitmap_clear_bit (bb_info->use, dregno);
      }

#ifdef EH_USES
  /* Process the uses that are live into an exception handler.  */
  for (use = df_get_artificial_uses (df, bb_index); use; use = use->next_ref)
    /* Add use to set of uses in this BB.  */
    if (DF_REF_FLAGS (use) & DF_REF_AT_TOP)
      bitmap_set_bit (bb_info->use, DF_REF_REGNO (use));
#endif
}


/* Compute local live register info for each basic block within BLOCKS.  */

static void
df_lr_local_compute (struct dataflow *dflow,
                     bitmap all_blocks,
                     bitmap rescan_blocks)
{
  struct df *df = dflow->df;
  unsigned int bb_index;
  bitmap_iterator bi;

  /* Assume that the stack pointer is unchanging if alloca hasn't
     been used.  */
  if (bitmap_equal_p (all_blocks, rescan_blocks))
    memset (regs_asm_clobbered, 0, sizeof (regs_asm_clobbered));

  bitmap_clear (df->hardware_regs_used);

  /* The all-important stack pointer must always be live.  */
  bitmap_set_bit (df->hardware_regs_used, STACK_POINTER_REGNUM);

  /* Before reload, there are a few registers that must be forced
     live everywhere -- which might not already be the case for
     blocks within infinite loops.  */
  if (!reload_completed)
    {
      /* Any reference to any pseudo before reload is a potential
         reference of the frame pointer.  */
      bitmap_set_bit (df->hardware_regs_used, FRAME_POINTER_REGNUM);

#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      /* Pseudos with argument area equivalences may require
         reloading via the argument pointer.  */
      if (fixed_regs[ARG_POINTER_REGNUM])
        bitmap_set_bit (df->hardware_regs_used, ARG_POINTER_REGNUM);
#endif

      /* Any constant, or pseudo with constant equivalences, may
         require reloading from memory using the pic register.  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
          && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
        bitmap_set_bit (df->hardware_regs_used, PIC_OFFSET_TABLE_REGNUM);
    }

  if (bitmap_bit_p (rescan_blocks, EXIT_BLOCK))
    {
      /* The exit block is special for this problem and its bits are
         computed from thin air.  */
      struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, EXIT_BLOCK);
      bitmap_copy (bb_info->use, df->exit_block_uses);
    }

  EXECUTE_IF_SET_IN_BITMAP (rescan_blocks, 0, bb_index, bi)
    {
      if (bb_index == EXIT_BLOCK)
        continue;
      df_lr_bb_local_compute (dflow, df, bb_index);
    }
}
1615
 
1616
 
1617
/* Initialize the solution vectors.  */
1618
 
1619
static void
1620
df_lr_init (struct dataflow *dflow, bitmap all_blocks)
1621
{
1622
  unsigned int bb_index;
1623
  bitmap_iterator bi;
1624
 
1625
  EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
1626
    {
1627
      struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, bb_index);
1628
      bitmap_copy (bb_info->in, bb_info->use);
1629
      bitmap_clear (bb_info->out);
1630
    }
1631
}
1632
 
1633
 
1634
/* Confluence function that processes infinite loops.  This might be a
1635
   noreturn function that throws.  And even if it isn't, getting the
1636
   unwind info right helps debugging.  */
1637
static void
1638
df_lr_confluence_0 (struct dataflow *dflow, basic_block bb)
1639
{
1640
  struct df *df = dflow->df;
1641
 
1642
  bitmap op1 = df_lr_get_bb_info (dflow, bb->index)->out;
1643
  if (bb != EXIT_BLOCK_PTR)
1644
    bitmap_copy (op1, df->hardware_regs_used);
1645
}
1646
 
1647
 
1648
/* Confluence function that ignores fake edges.  */
1649
 
1650
static void
1651
df_lr_confluence_n (struct dataflow *dflow, edge e)
1652
{
1653
  bitmap op1 = df_lr_get_bb_info (dflow, e->src->index)->out;
1654
  bitmap op2 = df_lr_get_bb_info (dflow, e->dest->index)->in;
1655
 
1656
  /* Call-clobbered registers die across exception and call edges.  */
1657
  /* ??? Abnormal call edges ignored for the moment, as this gets
1658
     confused by sibling call edges, which crashes reg-stack.  */
1659
  if (e->flags & EDGE_EH)
1660
    bitmap_ior_and_compl_into (op1, op2, df_invalidated_by_call);
1661
  else
1662
    bitmap_ior_into (op1, op2);
1663
 
1664
  bitmap_ior_into (op1, dflow->df->hardware_regs_used);
1665
}
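/* Illustrative sketch (not part of df-problems.c): along a normal edge
   the live-out set of the source block simply accumulates the live-in
   set of the destination, while along an EH edge the call-clobbered
   registers are filtered out first; the always-live hard registers are
   added in either case.  Toy model assuming a register set that fits
   in one 32-bit word; all names below are made up for this example.  */
#include <stdint.h>
#include <stdbool.h>

static void
toy_lr_confluence_edge (uint32_t *src_out, uint32_t dest_in,
                        uint32_t call_clobbered, uint32_t always_live,
                        bool is_eh_edge)
{
  if (is_eh_edge)
    *src_out |= dest_in & ~call_clobbered;   /* clobbered regs die here */
  else
    *src_out |= dest_in;

  *src_out |= always_live;                   /* e.g. the stack pointer  */
}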
1666
 
1667
 
1668
/* Transfer function.  */
1669
 
1670
static bool
1671
df_lr_transfer_function (struct dataflow *dflow, int bb_index)
1672
{
1673
  struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, bb_index);
1674
  bitmap in = bb_info->in;
1675
  bitmap out = bb_info->out;
1676
  bitmap use = bb_info->use;
1677
  bitmap def = bb_info->def;
1678
 
1679
  return bitmap_ior_and_compl (in, use, out, def);
1680
}
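/* Illustrative sketch (not part of df-problems.c): the LR transfer
   function above computes IN = USE | (OUT & ~DEF) for one block.  The
   same equation is shown here on a toy register set that fits in one
   32-bit word; GCC's bitmaps are sparse and unbounded, so this only
   models the arithmetic, and the names are invented for the example.  */
#include <stdint.h>
#include <stdbool.h>

struct toy_lr_bb
{
  uint32_t use;   /* Registers read before any write in the block.  */
  uint32_t def;   /* Registers written in the block.  */
  uint32_t in;    /* Registers live on entry (the solution).  */
  uint32_t out;   /* Registers live on exit.  */
};

/* Returns true if IN changed, mirroring the bool result that the
   iterative solver uses to decide whether to keep iterating.  */
static bool
toy_lr_transfer (struct toy_lr_bb *bb)
{
  uint32_t new_in = bb->use | (bb->out & ~bb->def);
  bool changed = new_in != bb->in;
  bb->in = new_in;
  return changed;
}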
1681
 
1682
 
1683
/* Free all storage associated with the problem.  */
1684
 
1685
static void
1686
df_lr_free (struct dataflow *dflow)
1687
{
1688
  if (dflow->block_info)
1689
    {
1690
      unsigned int i;
1691
      for (i = 0; i < dflow->block_info_size; i++)
1692
        {
1693
          struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, i);
1694
          if (bb_info)
1695
            {
1696
              BITMAP_FREE (bb_info->use);
1697
              BITMAP_FREE (bb_info->def);
1698
              BITMAP_FREE (bb_info->in);
1699
              BITMAP_FREE (bb_info->out);
1700
            }
1701
        }
1702
      free_alloc_pool (dflow->block_pool);
1703
 
1704
      dflow->block_info_size = 0;
1705
      free (dflow->block_info);
1706
    }
1707
 
1708
  free (dflow->problem_data);
1709
  free (dflow);
1710
}
1711
 
1712
 
1713
/* Debugging info.  */
1714
 
1715
static void
1716
df_lr_dump (struct dataflow *dflow, FILE *file)
1717
{
1718
  basic_block bb;
1719
 
1720
  if (!dflow->block_info)
1721
    return;
1722
 
1723
  fprintf (file, "Live Registers:\n");
1724
  FOR_ALL_BB (bb)
1725
    {
1726
      struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, bb->index);
1727
      df_print_bb_index (bb, file);
1728
 
1729
      if (!bb_info->in)
1730
        continue;
1731
 
1732
      fprintf (file, "  in  \t");
1733
      dump_bitmap (file, bb_info->in);
1734
      fprintf (file, "  use \t");
1735
      dump_bitmap (file, bb_info->use);
1736
      fprintf (file, "  def \t");
1737
      dump_bitmap (file, bb_info->def);
1738
      fprintf (file, "  out \t");
1739
      dump_bitmap (file, bb_info->out);
1740
    }
1741
}
1742
 
1743
/* All of the information associated with every instance of the problem.  */
1744
 
1745
static struct df_problem problem_LR =
1746
{
1747
  DF_LR,                      /* Problem id.  */
1748
  DF_BACKWARD,                /* Direction.  */
1749
  df_lr_alloc,                /* Allocate the problem specific data.  */
1750
  NULL,                       /* Reset global information.  */
1751
  df_lr_free_bb_info,         /* Free basic block info.  */
1752
  df_lr_local_compute,        /* Local compute function.  */
1753
  df_lr_init,                 /* Init the solution specific data.  */
1754
  df_iterative_dataflow,      /* Iterative solver.  */
1755
  df_lr_confluence_0,         /* Confluence operator 0.  */
1756
  df_lr_confluence_n,         /* Confluence operator n.  */
1757
  df_lr_transfer_function,    /* Transfer function.  */
1758
  NULL,                       /* Finalize function.  */
1759
  df_lr_free,                 /* Free all of the problem information.  */
1760
  df_lr_dump,                 /* Debugging.  */
1761
  NULL,                       /* Dependent problem.  */
1762
 
1763
};
1764
 
1765
 
1766
/* Create a new DATAFLOW instance and add it to an existing instance
1767
   of DF.  The returned structure is what is used to get at the
1768
   solution.  */
1769
 
1770
struct dataflow *
1771
df_lr_add_problem (struct df *df, int flags)
1772
{
1773
  return df_add_problem (df, &problem_LR, flags);
1774
}
1775
 
1776
 
1777
 
1778
/*----------------------------------------------------------------------------
1779
   UNINITIALIZED REGISTERS
1780
 
1781
   Find the set of uses for registers that are reachable from the entry
1782
   block without passing through a definition.  In and out bitvectors are built
1783
   for each basic block.  The regnum is used to index into these sets.
1784
   See df.h for details.
1785
----------------------------------------------------------------------------*/
1786
 
1787
/* Get basic block info.  */
1788
 
1789
struct df_ur_bb_info *
1790
df_ur_get_bb_info (struct dataflow *dflow, unsigned int index)
1791
{
1792
  return (struct df_ur_bb_info *) dflow->block_info[index];
1793
}
1794
 
1795
 
1796
/* Set basic block info.  */
1797
 
1798
static void
1799
df_ur_set_bb_info (struct dataflow *dflow, unsigned int index,
1800
                   struct df_ur_bb_info *bb_info)
1801
{
1802
  dflow->block_info[index] = bb_info;
1803
}
1804
 
1805
 
1806
/* Free basic block info.  */
1807
 
1808
static void
1809
df_ur_free_bb_info (struct dataflow *dflow,
1810
                    basic_block bb ATTRIBUTE_UNUSED,
1811
                    void *vbb_info)
1812
{
1813
  struct df_ur_bb_info *bb_info = (struct df_ur_bb_info *) vbb_info;
1814
  if (bb_info)
1815
    {
1816
      BITMAP_FREE (bb_info->gen);
1817
      BITMAP_FREE (bb_info->kill);
1818
      BITMAP_FREE (bb_info->in);
1819
      BITMAP_FREE (bb_info->out);
1820
      pool_free (dflow->block_pool, bb_info);
1821
    }
1822
}
1823
 
1824
 
1825
/* Allocate or reset bitmaps for DFLOW blocks. The solution bits are
1826
   not touched unless the block is new.  */
1827
 
1828
static void
1829
df_ur_alloc (struct dataflow *dflow, bitmap blocks_to_rescan,
1830
             bitmap all_blocks ATTRIBUTE_UNUSED)
1831
{
1832
  unsigned int bb_index;
1833
  bitmap_iterator bi;
1834
 
1835
  if (!dflow->block_pool)
1836
    dflow->block_pool = create_alloc_pool ("df_ur_block pool",
1837
                                           sizeof (struct df_ur_bb_info), 100);
1838
 
1839
  df_grow_bb_info (dflow);
1840
 
1841
  EXECUTE_IF_SET_IN_BITMAP (blocks_to_rescan, 0, bb_index, bi)
1842
    {
1843
      struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);
1844
      if (bb_info)
1845
        {
1846
          bitmap_clear (bb_info->kill);
1847
          bitmap_clear (bb_info->gen);
1848
        }
1849
      else
1850
        {
1851
          bb_info = (struct df_ur_bb_info *) pool_alloc (dflow->block_pool);
1852
          df_ur_set_bb_info (dflow, bb_index, bb_info);
1853
          bb_info->kill = BITMAP_ALLOC (NULL);
1854
          bb_info->gen = BITMAP_ALLOC (NULL);
1855
          bb_info->in = BITMAP_ALLOC (NULL);
1856
          bb_info->out = BITMAP_ALLOC (NULL);
1857
        }
1858
    }
1859
}
1860
 
1861
 
1862
/* Compute local uninitialized register info for basic block BB.  */
1863
 
1864
static void
1865
df_ur_bb_local_compute (struct dataflow *dflow, unsigned int bb_index)
1866
{
1867
  struct df *df = dflow->df;
1868
  basic_block bb = BASIC_BLOCK (bb_index);
1869
  struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);
1870
  rtx insn;
1871
  struct df_ref *def;
1872
 
1873
  bitmap_clear (seen_in_block);
1874
  bitmap_clear (seen_in_insn);
1875
 
1876
  for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
1877
    if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
1878
      {
1879
        unsigned int regno = DF_REF_REGNO (def);
1880
        if (!bitmap_bit_p (seen_in_block, regno))
1881
          {
1882
            bitmap_set_bit (seen_in_block, regno);
1883
            bitmap_set_bit (bb_info->gen, regno);
1884
          }
1885
      }
1886
 
1887
  FOR_BB_INSNS_REVERSE (bb, insn)
1888
    {
1889
      unsigned int uid = INSN_UID (insn);
1890
      if (!INSN_P (insn))
1891
        continue;
1892
 
1893
      for (def = DF_INSN_UID_DEFS (df, uid); def; def = def->next_ref)
1894
        {
1895
          unsigned int regno = DF_REF_REGNO (def);
1896
          /* Only the last def counts.  */
1897
          if (!bitmap_bit_p (seen_in_block, regno))
1898
            {
1899
              bitmap_set_bit (seen_in_insn, regno);
1900
 
1901
              if (DF_REF_FLAGS (def)
1902
                  & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER))
1903
                {
1904
                  /* Only must clobbers for the entire reg destroy the
1905
                     value.  */
1906
                  if ((DF_REF_FLAGS (def) & DF_REF_MUST_CLOBBER)
1907
                      && !(DF_REF_FLAGS (def) & DF_REF_PARTIAL))
1908
                    bitmap_set_bit (bb_info->kill, regno);
1909
                }
1910
              else
1911
                bitmap_set_bit (bb_info->gen, regno);
1912
            }
1913
        }
1914
      bitmap_ior_into (seen_in_block, seen_in_insn);
1915
      bitmap_clear (seen_in_insn);
1916
    }
1917
 
1918
  for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
1919
    if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
1920
      {
1921
        unsigned int regno = DF_REF_REGNO (def);
1922
        if (!bitmap_bit_p (seen_in_block, regno))
1923
          {
1924
            bitmap_set_bit (seen_in_block, regno);
1925
            bitmap_set_bit (bb_info->gen, regno);
1926
          }
1927
      }
1928
}
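/* Illustrative sketch (not part of df-problems.c): the scan above walks
   the insns in reverse so that, for each register, only the last def in
   the block decides gen vs. kill; the real code processes the defs of an
   insn one at a time and stages them in seen_in_insn before merging into
   seen_in_block.  This toy version collapses each insn into two masks
   (registers given a value, and registers whose value is destroyed by a
   must-clobber of the whole register); the names are invented here.  */
#include <stdint.h>

struct toy_ur_bb
{
  uint32_t gen;
  uint32_t kill;
};

static void
toy_ur_scan_block (struct toy_ur_bb *bb,
                   const uint32_t *insn_sets,      /* regs given a value  */
                   const uint32_t *insn_clobbers,  /* regs must-clobbered */
                   int n_insns)
{
  uint32_t seen_in_block = 0;
  int i;

  /* Reverse walk, so the last def of each register is seen first and
     is the only one that contributes to gen or kill.  */
  for (i = n_insns - 1; i >= 0; i--)
    {
      uint32_t fresh = (insn_sets[i] | insn_clobbers[i]) & ~seen_in_block;

      bb->gen  |= fresh & insn_sets[i] & ~insn_clobbers[i];
      bb->kill |= fresh & insn_clobbers[i];

      seen_in_block |= fresh;
    }
}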
1929
 
1930
 
1931
/* Compute local uninitialized register info.  */
1932
 
1933
static void
1934
df_ur_local_compute (struct dataflow *dflow,
1935
                     bitmap all_blocks ATTRIBUTE_UNUSED,
1936
                     bitmap rescan_blocks)
1937
{
1938
  unsigned int bb_index;
1939
  bitmap_iterator bi;
1940
 
1941
  df_set_seen ();
1942
 
1943
  EXECUTE_IF_SET_IN_BITMAP (rescan_blocks, 0, bb_index, bi)
1944
    {
1945
      df_ur_bb_local_compute (dflow, bb_index);
1946
    }
1947
 
1948
  df_unset_seen ();
1949
}
1950
 
1951
 
1952
/* Initialize the solution vectors.  */
1953
 
1954
static void
1955
df_ur_init (struct dataflow *dflow, bitmap all_blocks)
1956
{
1957
  unsigned int bb_index;
1958
  bitmap_iterator bi;
1959
 
1960
  EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
1961
    {
1962
      struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);
1963
 
1964
      bitmap_copy (bb_info->out, bb_info->gen);
1965
      bitmap_clear (bb_info->in);
1966
    }
1967
}
1968
 
1969
 
1970
/* Trim the ur_in and ur_out sets of all of the blocks against the
1971
   results of the lr (live registers) problem.  */
1972
 
1973
static void
1974
df_ur_local_finalize (struct dataflow *dflow, bitmap all_blocks)
1975
{
1976
  struct df *df = dflow->df;
1977
  struct dataflow *lr_dflow = df->problems_by_index[DF_LR];
1978
  bitmap tmp = BITMAP_ALLOC (NULL);
1979
  bitmap_iterator bi;
1980
  unsigned int bb_index;
1981
 
1982
  EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
1983
    {
1984
      struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);
1985
      struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (lr_dflow, bb_index);
1986
 
1987
      /* No register may reach a location where it is not used.  Thus
1988
         we trim the ur result to the places where it is used.  */
1989
      bitmap_and_into (bb_info->in, bb_lr_info->in);
1990
      bitmap_and_into (bb_info->out, bb_lr_info->out);
1991
 
1992
#if 1
1993
      /* Hard registers may still stick in the ur_out set, but not
1994
         be in the ur_in set, if their only mention was in a call
1995
         in this block.  This is because a call kills in the lr
1996
         problem but does not kill in the ur problem.  To clean
1997
         this up, we execute the transfer function on the lr_in
1998
         set and then use that to knock bits out of ur_out.  */
1999
      bitmap_ior_and_compl (tmp, bb_info->gen, bb_lr_info->in,
2000
                            bb_info->kill);
2001
      bitmap_and_into (bb_info->out, tmp);
2002
#endif
2003
    }
2004
 
2005
  BITMAP_FREE (tmp);
2006
}
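/* Illustrative sketch (not part of df-problems.c): the finalize step
   above first intersects the UR solution with the LR (liveness)
   solution, then recomputes TMP = GEN | (LR_IN & ~KILL) and intersects
   it into OUT, so that hard registers mentioned only by a call in the
   block do not linger in ur_out.  Toy model with 32-bit masks; the
   names are made up for this example.  */
#include <stdint.h>

static void
toy_ur_finalize_bb (uint32_t *ur_in, uint32_t *ur_out,
                    uint32_t gen, uint32_t kill,
                    uint32_t lr_in, uint32_t lr_out)
{
  uint32_t tmp;

  /* No register may reach a point where it is not live.  */
  *ur_in &= lr_in;
  *ur_out &= lr_out;

  /* Run the gen/kill transfer function on lr_in and use the result
     to knock call-only hard registers out of ur_out.  */
  tmp = gen | (lr_in & ~kill);
  *ur_out &= tmp;
}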
2007
 
2008
 
2009
/* Confluence function that ignores fake edges.  */
2010
 
2011
static void
2012
df_ur_confluence_n (struct dataflow *dflow, edge e)
2013
{
2014
  bitmap op1 = df_ur_get_bb_info (dflow, e->dest->index)->in;
2015
  bitmap op2 = df_ur_get_bb_info (dflow, e->src->index)->out;
2016
 
2017
  if (e->flags & EDGE_FAKE)
2018
    return;
2019
 
2020
  bitmap_ior_into (op1, op2);
2021
}
2022
 
2023
 
2024
/* Transfer function.  */
2025
 
2026
static bool
2027
df_ur_transfer_function (struct dataflow *dflow, int bb_index)
2028
{
2029
  struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);
2030
  bitmap in = bb_info->in;
2031
  bitmap out = bb_info->out;
2032
  bitmap gen = bb_info->gen;
2033
  bitmap kill = bb_info->kill;
2034
 
2035
  return bitmap_ior_and_compl (out, gen, in, kill);
2036
}
2037
 
2038
 
2039
/* Free all storage associated with the problem.  */
2040
 
2041
static void
2042
df_ur_free (struct dataflow *dflow)
2043
{
2044
  if (dflow->block_info)
2045
    {
2046
      unsigned int i;
2047
 
2048
      for (i = 0; i < dflow->block_info_size; i++)
2049
        {
2050
          struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, i);
2051
          if (bb_info)
2052
            {
2053
              BITMAP_FREE (bb_info->gen);
2054
              BITMAP_FREE (bb_info->kill);
2055
              BITMAP_FREE (bb_info->in);
2056
              BITMAP_FREE (bb_info->out);
2057
            }
2058
        }
2059
 
2060
      free_alloc_pool (dflow->block_pool);
2061
      dflow->block_info_size = 0;
2062
      free (dflow->block_info);
2063
    }
2064
  free (dflow);
2065
}
2066
 
2067
 
2068
/* Debugging info.  */
2069
 
2070
static void
2071
df_ur_dump (struct dataflow *dflow, FILE *file)
2072
{
2073
  basic_block bb;
2074
 
2075
  if (!dflow->block_info)
2076
    return;
2077
 
2078
  fprintf (file, "Undefined regs:\n");
2079
 
2080
  FOR_ALL_BB (bb)
2081
    {
2082
      struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb->index);
2083
      df_print_bb_index (bb, file);
2084
 
2085
      if (!bb_info->in)
2086
        continue;
2087
 
2088
      fprintf (file, "  in  \t");
2089
      dump_bitmap (file, bb_info->in);
2090
      fprintf (file, "  gen \t");
2091
      dump_bitmap (file, bb_info->gen);
2092
      fprintf (file, "  kill\t");
2093
      dump_bitmap (file, bb_info->kill);
2094
      fprintf (file, "  out \t");
2095
      dump_bitmap (file, bb_info->out);
2096
    }
2097
}
2098
 
2099
/* All of the information associated with every instance of the problem.  */
2100
 
2101
static struct df_problem problem_UR =
2102
{
2103
  DF_UR,                      /* Problem id.  */
2104
  DF_FORWARD,                 /* Direction.  */
2105
  df_ur_alloc,                /* Allocate the problem specific data.  */
2106
  NULL,                       /* Reset global information.  */
2107
  df_ur_free_bb_info,         /* Free basic block info.  */
2108
  df_ur_local_compute,        /* Local compute function.  */
2109
  df_ur_init,                 /* Init the solution specific data.  */
2110
  df_iterative_dataflow,      /* Iterative solver.  */
2111
  NULL,                       /* Confluence operator 0.  */
2112
  df_ur_confluence_n,         /* Confluence operator n.  */
2113
  df_ur_transfer_function,    /* Transfer function.  */
2114
  df_ur_local_finalize,       /* Finalize function.  */
2115
  df_ur_free,                 /* Free all of the problem information.  */
2116
  df_ur_dump,                 /* Debugging.  */
2117
  df_lr_add_problem,          /* Dependent problem.  */
2118
 
2119
};
2120
 
2121
 
2122
/* Create a new DATAFLOW instance and add it to an existing instance
2123
   of DF.  The returned structure is what is used to get at the
2124
   solution.  */
2125
 
2126
struct dataflow *
2127
df_ur_add_problem (struct df *df, int flags)
2128
{
2129
  return df_add_problem (df, &problem_UR, flags);
2130
}
2131
 
2132
 
2133
 
2134
/*----------------------------------------------------------------------------
2135
   UNINITIALIZED REGISTERS WITH EARLYCLOBBER
2136
 
2137
   Find the set of uses for registers that are reachable from the entry
2138
   block without passing through a definition.  In and out bitvectors are built
2139
   for each basic block.  The regnum is used to index into these sets.
2140
   See df.h for details.
2141
 
2142
   This is a variant of the UR problem above that has a lot of special
2143
   features just for the register allocation phase.  This problem
2144
   should go a away if someone would fix the interference graph.
2145
 
2146
   ----------------------------------------------------------------------------*/
2147
 
2148
/* Private data used to compute the solution for this problem.  These
2149
   data structures are not accessible outside of this module.  */
2150
struct df_urec_problem_data
2151
{
2152
  bool earlyclobbers_found;     /* True if any instruction contains an
2153
                                   earlyclobber.  */
2154
#ifdef STACK_REGS
2155
  bitmap stack_regs;            /* Registers that may be allocated to a stack register.  */
2156
#endif
2157
};
2158
 
2159
 
2160
/* Get basic block info.  */
2161
 
2162
struct df_urec_bb_info *
2163
df_urec_get_bb_info (struct dataflow *dflow, unsigned int index)
2164
{
2165
  return (struct df_urec_bb_info *) dflow->block_info[index];
2166
}
2167
 
2168
 
2169
/* Set basic block info.  */
2170
 
2171
static void
2172
df_urec_set_bb_info (struct dataflow *dflow, unsigned int index,
2173
                   struct df_urec_bb_info *bb_info)
2174
{
2175
  dflow->block_info[index] = bb_info;
2176
}
2177
 
2178
 
2179
/* Free basic block info.  */
2180
 
2181
static void
2182
df_urec_free_bb_info (struct dataflow *dflow,
2183
                      basic_block bb ATTRIBUTE_UNUSED,
2184
                      void *vbb_info)
2185
{
2186
  struct df_urec_bb_info *bb_info = (struct df_urec_bb_info *) vbb_info;
2187
  if (bb_info)
2188
    {
2189
      BITMAP_FREE (bb_info->gen);
2190
      BITMAP_FREE (bb_info->kill);
2191
      BITMAP_FREE (bb_info->in);
2192
      BITMAP_FREE (bb_info->out);
2193
      BITMAP_FREE (bb_info->earlyclobber);
2194
      pool_free (dflow->block_pool, bb_info);
2195
    }
2196
}
2197
 
2198
 
2199
/* Allocate or reset bitmaps for DFLOW blocks. The solution bits are
2200
   not touched unless the block is new.  */
2201
 
2202
static void
2203
df_urec_alloc (struct dataflow *dflow, bitmap blocks_to_rescan,
2204
               bitmap all_blocks ATTRIBUTE_UNUSED)
2205
 
2206
{
2207
  unsigned int bb_index;
2208
  bitmap_iterator bi;
2209
  struct df_urec_problem_data *problem_data
2210
    = (struct df_urec_problem_data *) dflow->problem_data;
2211
 
2212
  if (!dflow->block_pool)
2213
    dflow->block_pool = create_alloc_pool ("df_urec_block pool",
2214
                                           sizeof (struct df_urec_bb_info), 50);
2215
 
2216
  if (!dflow->problem_data)
2217
    {
2218
      problem_data = XNEW (struct df_urec_problem_data);
2219
      dflow->problem_data = problem_data;
2220
    }
2221
  problem_data->earlyclobbers_found = false;
2222
 
2223
  df_grow_bb_info (dflow);
2224
 
2225
  EXECUTE_IF_SET_IN_BITMAP (blocks_to_rescan, 0, bb_index, bi)
2226
    {
2227
      struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb_index);
2228
      if (bb_info)
2229
        {
2230
          bitmap_clear (bb_info->kill);
2231
          bitmap_clear (bb_info->gen);
2232
          bitmap_clear (bb_info->earlyclobber);
2233
        }
2234
      else
2235
        {
2236
          bb_info = (struct df_urec_bb_info *) pool_alloc (dflow->block_pool);
2237
          df_urec_set_bb_info (dflow, bb_index, bb_info);
2238
          bb_info->kill = BITMAP_ALLOC (NULL);
2239
          bb_info->gen = BITMAP_ALLOC (NULL);
2240
          bb_info->in = BITMAP_ALLOC (NULL);
2241
          bb_info->out = BITMAP_ALLOC (NULL);
2242
          bb_info->earlyclobber = BITMAP_ALLOC (NULL);
2243
        }
2244
    }
2245
}
2246
 
2247
 
2248
/* The function modifies local info for register REG being changed in
2249
   SETTER.  DATA is used to pass the current basic block info.  */
2250
 
2251
static void
2252
df_urec_mark_reg_change (rtx reg, rtx setter, void *data)
2253
{
2254
  int regno;
2255
  int endregno;
2256
  int i;
2257
  struct df_urec_bb_info *bb_info = (struct df_urec_bb_info*) data;
2258
 
2259
  if (GET_CODE (reg) == SUBREG)
2260
    reg = SUBREG_REG (reg);
2261
 
2262
  if (!REG_P (reg))
2263
    return;
2264
 
2265
 
2266
  endregno = regno = REGNO (reg);
2267
  if (regno < FIRST_PSEUDO_REGISTER)
2268
    {
2269
      endregno += hard_regno_nregs[regno][GET_MODE (reg)];
2270
 
2271
      for (i = regno; i < endregno; i++)
2272
        {
2273
          bitmap_set_bit (bb_info->kill, i);
2274
 
2275
          if (GET_CODE (setter) != CLOBBER)
2276
            bitmap_set_bit (bb_info->gen, i);
2277
          else
2278
            bitmap_clear_bit (bb_info->gen, i);
2279
        }
2280
    }
2281
  else
2282
    {
2283
      bitmap_set_bit (bb_info->kill, regno);
2284
 
2285
      if (GET_CODE (setter) != CLOBBER)
2286
        bitmap_set_bit (bb_info->gen, regno);
2287
      else
2288
        bitmap_clear_bit (bb_info->gen, regno);
2289
    }
2290
}
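/* Illustrative sketch (not part of df-problems.c): for a hard register,
   a multi-word value occupies hard_regno_nregs consecutive registers, so
   the kill and gen bits are set for the whole range, and a CLOBBER kills
   the value without generating one.  Toy version where the caller passes
   the register count directly; the names are invented for this example.  */
#include <stdint.h>
#include <stdbool.h>

static void
toy_mark_reg_change (uint32_t *gen, uint32_t *kill,
                     unsigned int regno, unsigned int nregs,
                     bool is_clobber)
{
  unsigned int i;

  for (i = regno; i < regno + nregs; i++)
    {
      *kill |= 1u << i;
      if (is_clobber)
        *gen &= ~(1u << i);   /* value destroyed, nothing generated */
      else
        *gen |= 1u << i;      /* ordinary set generates a value     */
    }
}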
2291
/* Classes of registers which could be early clobbered in the current
2292
   insn.  */
2293
 
2294
static VEC(int,heap) *earlyclobber_regclass;
2295
 
2296
/* This function finds and stores register classes that could be early
2297
   clobbered in INSN.  If any earlyclobber classes are found, the function
2298
   returns TRUE, in all other cases it returns FALSE.  */
2299
 
2300
static bool
2301
df_urec_check_earlyclobber (rtx insn)
2302
{
2303
  int opno;
2304
  bool found = false;
2305
 
2306
  extract_insn (insn);
2307
 
2308
  VEC_truncate (int, earlyclobber_regclass, 0);
2309
  for (opno = 0; opno < recog_data.n_operands; opno++)
2310
    {
2311
      char c;
2312
      bool amp_p;
2313
      int i;
2314
      enum reg_class class;
2315
      const char *p = recog_data.constraints[opno];
2316
 
2317
      class = NO_REGS;
2318
      amp_p = false;
2319
      for (;;)
2320
        {
2321
          c = *p;
2322
          switch (c)
2323
            {
2324
            case '=':  case '+':  case '?':
2325
            case '#':  case '!':
2326
            case '*':  case '%':
2327
            case 'm':  case '<':  case '>':  case 'V':  case 'o':
2328
            case 'E':  case 'F':  case 'G':  case 'H':
2329
            case 's':  case 'i':  case 'n':
2330
            case 'I':  case 'J':  case 'K':  case 'L':
2331
            case 'M':  case 'N':  case 'O':  case 'P':
2332
            case 'X':
2333
            case '0': case '1':  case '2':  case '3':  case '4':
2334
            case '5': case '6':  case '7':  case '8':  case '9':
2335
              /* These don't say anything we care about.  */
2336
              break;
2337
 
2338
            case '&':
2339
              amp_p = true;
2340
              break;
2341
            case '\0':
2342
            case ',':
2343
              if (amp_p && class != NO_REGS)
2344
                {
2345
                  int rc;
2346
 
2347
                  found = true;
2348
                  for (i = 0;
2349
                       VEC_iterate (int, earlyclobber_regclass, i, rc);
2350
                       i++)
2351
                    {
2352
                      if (rc == (int) class)
2353
                        goto found_rc;
2354
                    }
2355
 
2356
                  /* We use VEC_quick_push here because
2357
                     earlyclobber_regclass holds no more than
2358
                     N_REG_CLASSES elements. */
2359
                  VEC_quick_push (int, earlyclobber_regclass, (int) class);
2360
                found_rc:
2361
                  ;
2362
                }
2363
 
2364
              amp_p = false;
2365
              class = NO_REGS;
2366
              break;
2367
 
2368
            case 'r':
2369
              class = GENERAL_REGS;
2370
              break;
2371
 
2372
            default:
2373
              class = REG_CLASS_FROM_CONSTRAINT (c, p);
2374
              break;
2375
            }
2376
          if (c == '\0')
2377
            break;
2378
          p += CONSTRAINT_LEN (c, p);
2379
        }
2380
    }
2381
 
2382
  return found;
2383
}
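/* Illustrative sketch (not part of df-problems.c): the scan above walks
   each operand's constraint string and records a register class when a
   '&' (earlyclobber) and a register constraint appear in the same
   alternative (alternatives are separated by ',').  This toy scanner
   only distinguishes "some register class" from "no class", advances one
   character at a time instead of by CONSTRAINT_LEN, and reports whether
   any alternative is an earlyclobber register operand.  */
#include <stdbool.h>

static bool
toy_constraint_has_earlyclobber (const char *p)
{
  bool amp_p = false;      /* Saw '&' in the current alternative.        */
  bool has_class = false;  /* Saw a register constraint ('r' and such).  */
  bool found = false;

  for (;; p++)
    {
      char c = *p;
      switch (c)
        {
        case '&':
          amp_p = true;
          break;

        case 'r':            /* GENERAL_REGS; the real code also maps
                                machine-specific letters to classes.  */
          has_class = true;
          break;

        case '\0':
        case ',':            /* End of one alternative.  */
          if (amp_p && has_class)
            found = true;
          amp_p = false;
          has_class = false;
          break;

        default:             /* Modifiers, constants, memory, etc.  */
          break;
        }
      if (c == '\0')
        return found;
    }
}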
2384
 
2385
/* The function checks that pseudo-register *X has a class
2386
   intersecting with the class of a pseudo-register that could be early
2387
   clobbered in the same insn.
2388
 
2389
   This function is a no-op if earlyclobber_regclass is empty.
2390
 
2391
   Reload can assign the same hard register to an uninitialized
2392
   pseudo-register and an early-clobbered pseudo-register in an insn if
2393
   the pseudo-register is used for the first time in the given BB and is
2394
   not live at the BB start.  To prevent this we do not change the life
2395
   information for such pseudo-registers.  */
2396
 
2397
static int
2398
df_urec_mark_reg_use_for_earlyclobber (rtx *x, void *data)
2399
{
2400
  enum reg_class pref_class, alt_class;
2401
  int i, regno;
2402
  struct df_urec_bb_info *bb_info = (struct df_urec_bb_info*) data;
2403
 
2404
  if (REG_P (*x) && REGNO (*x) >= FIRST_PSEUDO_REGISTER)
2405
    {
2406
      int rc;
2407
 
2408
      regno = REGNO (*x);
2409
      if (bitmap_bit_p (bb_info->kill, regno)
2410
          || bitmap_bit_p (bb_info->gen, regno))
2411
        return 0;
2412
      pref_class = reg_preferred_class (regno);
2413
      alt_class = reg_alternate_class (regno);
2414
      for (i = 0; VEC_iterate (int, earlyclobber_regclass, i, rc); i++)
2415
        {
2416
          if (reg_classes_intersect_p (rc, pref_class)
2417
              || (rc != NO_REGS
2418
                  && reg_classes_intersect_p (rc, alt_class)))
2419
            {
2420
              bitmap_set_bit (bb_info->earlyclobber, regno);
2421
              break;
2422
            }
2423
        }
2424
    }
2425
  return 0;
2426
}
2427
 
2428
/* The function processes all pseudo-registers in *X with the aid of
2429
   previous function.  */
2430
 
2431
static void
2432
df_urec_mark_reg_use_for_earlyclobber_1 (rtx *x, void *data)
2433
{
2434
  for_each_rtx (x, df_urec_mark_reg_use_for_earlyclobber, data);
2435
}
2436
 
2437
 
2438
/* Compute local uninitialized register info for basic block BB.  */
2439
 
2440
static void
2441
df_urec_bb_local_compute (struct dataflow *dflow, unsigned int bb_index)
2442
{
2443
  struct df *df = dflow->df;
2444
  basic_block bb = BASIC_BLOCK (bb_index);
2445
  struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb_index);
2446
  rtx insn;
2447
  struct df_ref *def;
2448
 
2449
  for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
2450
    if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
2451
      {
2452
        unsigned int regno = DF_REF_REGNO (def);
2453
        bitmap_set_bit (bb_info->gen, regno);
2454
      }
2455
 
2456
  FOR_BB_INSNS (bb, insn)
2457
    {
2458
      if (INSN_P (insn))
2459
        {
2460
          note_stores (PATTERN (insn), df_urec_mark_reg_change, bb_info);
2461
          if (df_urec_check_earlyclobber (insn))
2462
            {
2463
              struct df_urec_problem_data *problem_data
2464
                = (struct df_urec_problem_data *) dflow->problem_data;
2465
              problem_data->earlyclobbers_found = true;
2466
              note_uses (&PATTERN (insn),
2467
                         df_urec_mark_reg_use_for_earlyclobber_1, bb_info);
2468
            }
2469
        }
2470
    }
2471
 
2472
  for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
2473
    if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
2474
      {
2475
        unsigned int regno = DF_REF_REGNO (def);
2476
        bitmap_set_bit (bb_info->gen, regno);
2477
      }
2478
 
2479
}
2480
 
2481
 
2482
/* Compute local uninitialized register info.  */
2483
 
2484
static void
2485
df_urec_local_compute (struct dataflow *dflow,
2486
                     bitmap all_blocks ATTRIBUTE_UNUSED,
2487
                     bitmap rescan_blocks)
2488
{
2489
  unsigned int bb_index;
2490
  bitmap_iterator bi;
2491
#ifdef STACK_REGS
2492
  int i;
2493
  HARD_REG_SET zero, stack_hard_regs, used;
2494
  struct df_urec_problem_data *problem_data
2495
    = (struct df_urec_problem_data *) dflow->problem_data;
2496
 
2497
  /* Any register that MAY be allocated to a register stack (like the
2498
     387) is treated poorly.  Each such register is marked as being
2499
     live everywhere.  This keeps the register allocator and the
2500
     subsequent passes from doing anything useful with these values.
2501
 
2502
     FIXME: This seems like an incredibly poor idea.  */
2503
 
2504
  CLEAR_HARD_REG_SET (zero);
2505
  CLEAR_HARD_REG_SET (stack_hard_regs);
2506
  for (i = FIRST_STACK_REG; i <= LAST_STACK_REG; i++)
2507
    SET_HARD_REG_BIT (stack_hard_regs, i);
2508
  problem_data->stack_regs = BITMAP_ALLOC (NULL);
2509
  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
2510
    {
2511
      COPY_HARD_REG_SET (used, reg_class_contents[reg_preferred_class (i)]);
2512
      IOR_HARD_REG_SET (used, reg_class_contents[reg_alternate_class (i)]);
2513
      AND_HARD_REG_SET (used, stack_hard_regs);
2514
      GO_IF_HARD_REG_EQUAL (used, zero, skip);
2515
      bitmap_set_bit (problem_data->stack_regs, i);
2516
    skip:
2517
      ;
2518
    }
2519
#endif
2520
 
2521
  /* We know that earlyclobber_regclass holds no more than
2522
    N_REG_CLASSES elements.  See df_urec_check_earlyclobber.  */
2523
  earlyclobber_regclass = VEC_alloc (int, heap, N_REG_CLASSES);
2524
 
2525
  EXECUTE_IF_SET_IN_BITMAP (rescan_blocks, 0, bb_index, bi)
2526
    {
2527
      df_urec_bb_local_compute (dflow, bb_index);
2528
    }
2529
 
2530
  VEC_free (int, heap, earlyclobber_regclass);
2531
}
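/* Illustrative sketch (not part of df-problems.c): a pseudo is added to
   problem_data->stack_regs when the hard registers allowed by its
   preferred or alternate class overlap the stack registers.  Toy model
   where register classes are plain bit masks over hard registers; the
   names are invented for this example.  */
#include <stdint.h>
#include <stdbool.h>

static bool
toy_pseudo_may_use_stack_reg (uint32_t pref_class_regs,
                              uint32_t alt_class_regs,
                              uint32_t stack_hard_regs)
{
  /* used = pref ∪ alt; membership iff used ∩ stack is non-empty.  */
  return ((pref_class_regs | alt_class_regs) & stack_hard_regs) != 0;
}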
2532
 
2533
 
2534
/* Initialize the solution vectors.  */
2535
 
2536
static void
2537
df_urec_init (struct dataflow *dflow, bitmap all_blocks)
2538
{
2539
  unsigned int bb_index;
2540
  bitmap_iterator bi;
2541
 
2542
  EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
2543
    {
2544
      struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb_index);
2545
 
2546
      bitmap_copy (bb_info->out, bb_info->gen);
2547
      bitmap_clear (bb_info->in);
2548
    }
2549
}
2550
 
2551
 
2552
/* Or in the stack regs, hard regs and early clobber regs into the
2553
   ur_in sets of all of the blocks.  */
2554
 
2555
static void
2556
df_urec_local_finalize (struct dataflow *dflow, bitmap all_blocks)
2557
{
2558
  struct df *df = dflow->df;
2559
  struct dataflow *lr_dflow = df->problems_by_index[DF_LR];
2560
  bitmap tmp = BITMAP_ALLOC (NULL);
2561
  bitmap_iterator bi;
2562
  unsigned int bb_index;
2563
  struct df_urec_problem_data *problem_data
2564
    = (struct df_urec_problem_data *) dflow->problem_data;
2565
 
2566
  EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
2567
    {
2568
      struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb_index);
2569
      struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (lr_dflow, bb_index);
2570
 
2571
      if (bb_index != ENTRY_BLOCK && bb_index != EXIT_BLOCK)
2572
        {
2573
          if (problem_data->earlyclobbers_found)
2574
            bitmap_ior_into (bb_info->in, bb_info->earlyclobber);
2575
 
2576
#ifdef STACK_REGS
2577
          /* We cannot use the same stack register for an uninitialized
2578
             pseudo-register and another live pseudo-register
2579
             because if the uninitialized pseudo-register dies,
2580
             the subsequent reg-stack pass will be confused (it will
2581
             believe that the other register dies).  */
2582
          bitmap_ior_into (bb_info->in, problem_data->stack_regs);
2583
          bitmap_ior_into (bb_info->out, problem_data->stack_regs);
2584
#endif
2585
        }
2586
 
2587
      /* No register may reach a location where it is not used.  Thus
2588
         we trim the urec result to the places where it is used.  */
2589
      bitmap_and_into (bb_info->in, bb_lr_info->in);
2590
      bitmap_and_into (bb_info->out, bb_lr_info->out);
2591
 
2592
#if 1
2593
      /* Hard registers may still stick in the ur_out set, but not
2594
         be in the ur_in set, if their only mention was in a call
2595
         in this block.  This is because a call kills in the lr
2596
         problem but does not kill in the urec problem.  To clean
2597
         this up, we execute the transfer function on the lr_in
2598
         set and then use that to knock bits out of ur_out.  */
2599
      bitmap_ior_and_compl (tmp, bb_info->gen, bb_lr_info->in,
2600
                            bb_info->kill);
2601
      bitmap_and_into (bb_info->out, tmp);
2602
#endif
2603
    }
2604
 
2605
#ifdef STACK_REGS
2606
  BITMAP_FREE (problem_data->stack_regs);
2607
#endif
2608
  BITMAP_FREE (tmp);
2609
}
2610
 
2611
 
2612
/* Confluence function that ignores fake edges.  */
2613
 
2614
static void
2615
df_urec_confluence_n (struct dataflow *dflow, edge e)
2616
{
2617
  bitmap op1 = df_urec_get_bb_info (dflow, e->dest->index)->in;
2618
  bitmap op2 = df_urec_get_bb_info (dflow, e->src->index)->out;
2619
 
2620
  if (e->flags & EDGE_FAKE)
2621
    return;
2622
 
2623
  bitmap_ior_into (op1, op2);
2624
}
2625
 
2626
 
2627
/* Transfer function.  */
2628
 
2629
static bool
2630
df_urec_transfer_function (struct dataflow *dflow, int bb_index)
2631
{
2632
  struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb_index);
2633
  bitmap in = bb_info->in;
2634
  bitmap out = bb_info->out;
2635
  bitmap gen = bb_info->gen;
2636
  bitmap kill = bb_info->kill;
2637
 
2638
  return bitmap_ior_and_compl (out, gen, in, kill);
2639
}
2640
 
2641
 
2642
/* Free all storage associated with the problem.  */
2643
 
2644
static void
2645
df_urec_free (struct dataflow *dflow)
2646
{
2647
  if (dflow->block_info)
2648
    {
2649
      unsigned int i;
2650
 
2651
      for (i = 0; i < dflow->block_info_size; i++)
2652
        {
2653
          struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, i);
2654
          if (bb_info)
2655
            {
2656
              BITMAP_FREE (bb_info->gen);
2657
              BITMAP_FREE (bb_info->kill);
2658
              BITMAP_FREE (bb_info->in);
2659
              BITMAP_FREE (bb_info->out);
2660
              BITMAP_FREE (bb_info->earlyclobber);
2661
            }
2662
        }
2663
 
2664
      free_alloc_pool (dflow->block_pool);
2665
 
2666
      dflow->block_info_size = 0;
2667
      free (dflow->block_info);
2668
      free (dflow->problem_data);
2669
    }
2670
  free (dflow);
2671
}
2672
 
2673
 
2674
/* Debugging info.  */
2675
 
2676
static void
2677
df_urec_dump (struct dataflow *dflow, FILE *file)
2678
{
2679
  basic_block bb;
2680
 
2681
  if (!dflow->block_info)
2682
    return;
2683
 
2684
  fprintf (file, "Undefined regs:\n");
2685
 
2686
  FOR_ALL_BB (bb)
2687
    {
2688
      struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb->index);
2689
      df_print_bb_index (bb, file);
2690
 
2691
      if (!bb_info->in)
2692
        continue;
2693
 
2694
      fprintf (file, "  in  \t");
2695
      dump_bitmap (file, bb_info->in);
2696
      fprintf (file, "  gen \t");
2697
      dump_bitmap (file, bb_info->gen);
2698
      fprintf (file, "  kill\t");
2699
      dump_bitmap (file, bb_info->kill);
2700
      fprintf (file, "  ec\t");
2701
      dump_bitmap (file, bb_info->earlyclobber);
2702
      fprintf (file, "  out \t");
2703
      dump_bitmap (file, bb_info->out);
2704
    }
2705
}
2706
 
2707
/* All of the information associated with every instance of the problem.  */
2708
 
2709
static struct df_problem problem_UREC =
2710
{
2711
  DF_UREC,                    /* Problem id.  */
2712
  DF_FORWARD,                 /* Direction.  */
2713
  df_urec_alloc,              /* Allocate the problem specific data.  */
2714
  NULL,                       /* Reset global information.  */
2715
  df_urec_free_bb_info,       /* Free basic block info.  */
2716
  df_urec_local_compute,      /* Local compute function.  */
2717
  df_urec_init,               /* Init the solution specific data.  */
2718
  df_iterative_dataflow,      /* Iterative solver.  */
2719
  NULL,                       /* Confluence operator 0.  */
2720
  df_urec_confluence_n,       /* Confluence operator n.  */
2721
  df_urec_transfer_function,  /* Transfer function.  */
2722
  df_urec_local_finalize,     /* Finalize function.  */
2723
  df_urec_free,               /* Free all of the problem information.  */
2724
  df_urec_dump,               /* Debugging.  */
2725
  df_lr_add_problem,          /* Dependent problem.  */
2726
 
2727
};
2728
 
2729
 
2730
/* Create a new DATAFLOW instance and add it to an existing instance
2731
   of DF.  The returned structure is what is used to get at the
2732
   solution.  */
2733
 
2734
struct dataflow *
2735
df_urec_add_problem (struct df *df, int flags)
2736
{
2737
  return df_add_problem (df, &problem_UREC, flags);
2738
}
2739
 
2740
 
2741
 
2742
/*----------------------------------------------------------------------------
2743
   CREATE DEF_USE (DU) and / or USE_DEF (UD) CHAINS
2744
 
2745
   Link either the defs to the uses and / or the uses to the defs.
2746
 
2747
   These problems are set up like the other dataflow problems so that
2748
   they nicely fit into the framework.  They are much simpler and only
2749
   involve a single traversal of instructions and an examination of
2750
   the reaching defs information (the dependent problem).
2751
----------------------------------------------------------------------------*/
2752
 
2753
/* Create def-use or use-def chains.  */
2754
 
2755
static void
2756
df_chain_alloc (struct dataflow *dflow,
2757
                bitmap blocks_to_rescan ATTRIBUTE_UNUSED,
2758
                bitmap all_blocks ATTRIBUTE_UNUSED)
2759
 
2760
{
2761
  struct df *df = dflow->df;
2762
  unsigned int i;
2763
 
2764
  /* Wholesale destruction of the old chains.  */
2765
  if (dflow->block_pool)
2766
    free_alloc_pool (dflow->block_pool);
2767
 
2768
  dflow->block_pool = create_alloc_pool ("df_chain_chain_block pool",
2769
                                         sizeof (struct df_link), 100);
2770
 
2771
  if (dflow->flags & DF_DU_CHAIN)
2772
    {
2773
      if (!df->def_info.refs_organized)
2774
        df_reorganize_refs (&df->def_info);
2775
 
2776
      /* Clear out the pointers from the refs.  */
2777
      for (i = 0; i < DF_DEFS_SIZE (df); i++)
2778
        {
2779
          struct df_ref *ref = df->def_info.refs[i];
2780
          DF_REF_CHAIN (ref) = NULL;
2781
        }
2782
    }
2783
 
2784
  if (dflow->flags & DF_UD_CHAIN)
2785
    {
2786
      if (!df->use_info.refs_organized)
2787
        df_reorganize_refs (&df->use_info);
2788
      for (i = 0; i < DF_USES_SIZE (df); i++)
2789
        {
2790
          struct df_ref *ref = df->use_info.refs[i];
2791
          DF_REF_CHAIN (ref) = NULL;
2792
        }
2793
    }
2794
}
2795
 
2796
 
2797
/* Reset all def_use and use_def chains in INSN.  */
2798
 
2799
static void
2800
df_chain_insn_reset (struct dataflow *dflow, rtx insn)
2801
{
2802
  struct df *df = dflow->df;
2803
  unsigned int uid = INSN_UID (insn);
2804
  struct df_insn_info *insn_info = NULL;
2805
  struct df_ref *ref;
2806
 
2807
  if (uid < df->insns_size)
2808
    insn_info = DF_INSN_UID_GET (df, uid);
2809
 
2810
  if (insn_info)
2811
    {
2812
      if (dflow->flags & DF_DU_CHAIN)
2813
        {
2814
          ref = insn_info->defs;
2815
          while (ref)
2816
            {
2817
              ref->chain = NULL;
2818
              ref = ref->next_ref;
2819
            }
2820
        }
2821
 
2822
      if (dflow->flags & DF_UD_CHAIN)
2823
        {
2824
          ref = insn_info->uses;
2825
          while (ref)
2826
            {
2827
              ref->chain = NULL;
2828
              ref = ref->next_ref;
2829
            }
2830
        }
2831
    }
2832
}
2833
 
2834
 
2835
/* Reset all def_use and use_def chains in basic block.  */
2836
 
2837
static void
2838
df_chain_bb_reset (struct dataflow *dflow, unsigned int bb_index)
2839
{
2840
  struct df *df = dflow->df;
2841
  rtx insn;
2842
  basic_block bb = BASIC_BLOCK (bb_index);
2843
 
2844
  /* Someone deleted the basic block out from under us.  */
2845
  if (!bb)
2846
    return;
2847
 
2848
  FOR_BB_INSNS (bb, insn)
2849
    {
2850
      if (INSN_P (insn))
2851
        {
2852
          /* Record defs within INSN.  */
2853
          df_chain_insn_reset (dflow, insn);
2854
        }
2855
    }
2856
 
2857
  /* Get rid of any chains in artificial uses or defs.  */
2858
  if (dflow->flags & DF_DU_CHAIN)
2859
    {
2860
      struct df_ref *def;
2861
      def = df_get_artificial_defs (df, bb_index);
2862
      while (def)
2863
        {
2864
          def->chain = NULL;
2865
          def = def->next_ref;
2866
        }
2867
    }
2868
 
2869
  if (dflow->flags & DF_UD_CHAIN)
2870
    {
2871
      struct df_ref *use;
2872
      use = df_get_artificial_uses (df, bb_index);
2873
      while (use)
2874
        {
2875
          use->chain = NULL;
2876
          use = use->next_ref;
2877
        }
2878
    }
2879
}
2880
 
2881
 
2882
/* Reset all of the chains when the set of basic blocks changes.  */
2883
 
2884
 
2885
static void
2886
df_chain_reset (struct dataflow *dflow, bitmap blocks_to_clear)
2887
{
2888
  bitmap_iterator bi;
2889
  unsigned int bb_index;
2890
 
2891
  EXECUTE_IF_SET_IN_BITMAP (blocks_to_clear, 0, bb_index, bi)
2892
    {
2893
      df_chain_bb_reset (dflow, bb_index);
2894
    }
2895
 
2896
  free_alloc_pool (dflow->block_pool);
2897
  dflow->block_pool = NULL;
2898
}
2899
 
2900
 
2901
/* Create the chains for a list of USEs.  */
2902
 
2903
static void
2904
df_chain_create_bb_process_use (struct dataflow *dflow,
2905
                                bitmap local_rd,
2906
                                struct df_ref *use,
2907
                                enum df_ref_flags top_flag)
2908
{
2909
  struct df *df = dflow->df;
2910
  bitmap_iterator bi;
2911
  unsigned int def_index;
2912
 
2913
  while (use)
2914
    {
2915
      /* Do not want to go through this for an uninitialized var.  */
2916
      unsigned int uregno = DF_REF_REGNO (use);
2917
      int count = DF_REG_DEF_GET (df, uregno)->n_refs;
2918
      if (count)
2919
        {
2920
          if (top_flag == (DF_REF_FLAGS (use) & DF_REF_AT_TOP))
2921
            {
2922
              unsigned int first_index = DF_REG_DEF_GET (df, uregno)->begin;
2923
              unsigned int last_index = first_index + count - 1;
2924
 
2925
              EXECUTE_IF_SET_IN_BITMAP (local_rd, first_index, def_index, bi)
2926
                {
2927
                  struct df_ref *def;
2928
                  if (def_index > last_index)
2929
                    break;
2930
 
2931
                  def = DF_DEFS_GET (df, def_index);
2932
                  if (dflow->flags & DF_DU_CHAIN)
2933
                    df_chain_create (dflow, def, use);
2934
                  if (dflow->flags & DF_UD_CHAIN)
2935
                    df_chain_create (dflow, use, def);
2936
                }
2937
            }
2938
        }
2939
      use = use->next_ref;
2940
    }
2941
}
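/* Illustrative sketch (not part of df-problems.c): in the reaching-defs
   problem every def has a global id, and the defs of one register get
   consecutive ids [begin, begin + n_refs).  For a use of that register,
   the loop above links the use to every def id in that range that is
   still present in the local reaching-defs set.  Toy model where the
   reaching set and the result are small fixed arrays; the names are
   invented for this example.  */
#include <stdbool.h>
#include <stddef.h>

#define TOY_MAX_DEFS 64

static size_t
toy_collect_reaching_defs (const bool *reaching,   /* indexed by def id   */
                           unsigned int begin,     /* first id for the reg */
                           unsigned int n_refs,    /* number of its defs   */
                           unsigned int *out_ids)  /* caller-sized buffer  */
{
  size_t n = 0;
  unsigned int id;

  for (id = begin; id < begin + n_refs && id < TOY_MAX_DEFS; id++)
    if (reaching[id])
      out_ids[n++] = id;   /* a du- or ud-chain link would be made here */

  return n;
}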
2942
 
2943
/* Reset the storage pool that the def-use or use-def chains have been
2944
   allocated in.  We do not need to readjust the pointers in the refs,
2945
   these have already been cleaned out.  */
2946
 
2947
/* Create chains from reaching defs bitmaps for basic block BB.  */
2948
static void
2949
df_chain_create_bb (struct dataflow *dflow,
2950
                    struct dataflow *rd_dflow,
2951
                    unsigned int bb_index)
2952
{
2953
  basic_block bb = BASIC_BLOCK (bb_index);
2954
  struct df_rd_bb_info *bb_info = df_rd_get_bb_info (rd_dflow, bb_index);
2955
  rtx insn;
2956
  bitmap cpy = BITMAP_ALLOC (NULL);
2957
  struct df *df = dflow->df;
2958
  struct df_ref *def;
2959
 
2960
  bitmap_copy (cpy, bb_info->in);
2961
 
2962
  /* Since we are going forwards, process the artificial uses first
2963
     then the artificial defs second.  */
2964
 
2965
#ifdef EH_USES
2966
  /* Create the chains for the artificial uses from the EH_USES at the
2967
     beginning of the block.  */
2968
  df_chain_create_bb_process_use (dflow, cpy,
2969
                                  df_get_artificial_uses (df, bb->index),
2970
                                  DF_REF_AT_TOP);
2971
#endif
2972
 
2973
  for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
2974
    if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
2975
      {
2976
        unsigned int dregno = DF_REF_REGNO (def);
2977
        if (!(DF_REF_FLAGS (def) & DF_REF_PARTIAL))
2978
          bitmap_clear_range (cpy,
2979
                              DF_REG_DEF_GET (df, dregno)->begin,
2980
                              DF_REG_DEF_GET (df, dregno)->n_refs);
2981
        bitmap_set_bit (cpy, DF_REF_ID (def));
2982
      }
2983
 
2984
  /* Process the regular instructions next.  */
2985
  FOR_BB_INSNS (bb, insn)
2986
    {
2987
      struct df_ref *def;
2988
      unsigned int uid = INSN_UID (insn);
2989
 
2990
      if (!INSN_P (insn))
2991
        continue;
2992
 
2993
      /* Now scan the uses and link them up with the defs that remain
2994
         in the cpy vector.  */
2995
 
2996
      df_chain_create_bb_process_use (dflow, cpy,
2997
                                     DF_INSN_UID_USES (df, uid), 0);
2998
 
2999
      /* Since we are going forwards, process the defs second.  This
3000
         pass only changes the bits in cpy.  */
3001
      for (def = DF_INSN_UID_DEFS (df, uid); def; def = def->next_ref)
3002
        {
3003
          unsigned int dregno = DF_REF_REGNO (def);
3004
          if (!(DF_REF_FLAGS (def) & DF_REF_PARTIAL))
3005
            bitmap_clear_range (cpy,
3006
                                DF_REG_DEF_GET (df, dregno)->begin,
3007
                                DF_REG_DEF_GET (df, dregno)->n_refs);
3008
          if (!(DF_REF_FLAGS (def)
3009
                 & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
3010
            bitmap_set_bit (cpy, DF_REF_ID (def));
3011
        }
3012
    }
3013
 
3014
  /* Create the chains for the artificial uses of the hard registers
3015
     at the end of the block.  */
3016
  df_chain_create_bb_process_use (dflow, cpy,
3017
                                  df_get_artificial_uses (df, bb->index), 0);
3018
}
3019
 
3020
/* Create def-use chains from reaching defs bitmaps for basic blocks
3021
   in BLOCKS.  */
3022
 
3023
static void
3024
df_chain_finalize (struct dataflow *dflow, bitmap all_blocks)
3025
{
3026
  unsigned int bb_index;
3027
  bitmap_iterator bi;
3028
  struct df *df = dflow->df;
3029
  struct dataflow *rd_dflow = df->problems_by_index [DF_RD];
3030
 
3031
  EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
3032
    {
3033
      df_chain_create_bb (dflow, rd_dflow, bb_index);
3034
    }
3035
}
3036
 
3037
 
3038
/* Free all storage associated with the problem.  */
3039
 
3040
static void
3041
df_chain_free (struct dataflow *dflow)
3042
{
3043
  free_alloc_pool (dflow->block_pool);
3044
  free (dflow);
3045
}
3046
 
3047
 
3048
/* Debugging info.  */
3049
 
3050
static void
3051
df_chains_dump (struct dataflow *dflow, FILE *file)
3052
{
3053
  struct df *df = dflow->df;
3054
  unsigned int j;
3055
 
3056
  if (dflow->flags & DF_DU_CHAIN)
3057
    {
3058
      fprintf (file, "Def-use chains:\n");
3059
      for (j = 0; j < df->def_info.bitmap_size; j++)
3060
        {
3061
          struct df_ref *def = DF_DEFS_GET (df, j);
3062
          if (def)
3063
            {
3064
              fprintf (file, "d%d bb %d luid %d insn %d reg %d ",
3065
                       j, DF_REF_BBNO (def),
3066
                       DF_REF_INSN (def) ?
3067
                       DF_INSN_LUID (df, DF_REF_INSN (def)):
3068
                       -1,
3069
                       DF_REF_INSN (def) ? DF_REF_INSN_UID (def) : -1,
3070
                       DF_REF_REGNO (def));
3071
              if (def->flags & DF_REF_READ_WRITE)
3072
                fprintf (file, "read/write ");
3073
              df_chain_dump (DF_REF_CHAIN (def), file);
3074
              fprintf (file, "\n");
3075
            }
3076
        }
3077
    }
3078
 
3079
  if (dflow->flags & DF_UD_CHAIN)
3080
    {
3081
      fprintf (file, "Use-def chains:\n");
3082
      for (j = 0; j < df->use_info.bitmap_size; j++)
3083
        {
3084
          struct df_ref *use = DF_USES_GET (df, j);
3085
          if (use)
3086
            {
3087
              fprintf (file, "u%d bb %d luid %d insn %d reg %d ",
3088
                       j, DF_REF_BBNO (use),
3089
                       DF_REF_INSN (use) ?
3090
                       DF_INSN_LUID (df, DF_REF_INSN (use))
3091
                       : -1,
3092
                       DF_REF_INSN (DF_USES_GET (df, j)) ?
3093
                       DF_REF_INSN_UID (DF_USES_GET (df,j))
3094
                       : -1,
3095
                       DF_REF_REGNO (use));
3096
              if (use->flags & DF_REF_READ_WRITE)
3097
                fprintf (file, "read/write ");
3098
              if (use->flags & DF_REF_STRIPPED)
3099
                fprintf (file, "stripped ");
3100
              if (use->flags & DF_REF_IN_NOTE)
3101
                fprintf (file, "note ");
3102
              df_chain_dump (DF_REF_CHAIN (use), file);
3103
              fprintf (file, "\n");
3104
            }
3105
        }
3106
    }
3107
}
3108
 
3109
 
3110
static struct df_problem problem_CHAIN =
3111
{
3112
  DF_CHAIN,                   /* Problem id.  */
3113
  DF_NONE,                    /* Direction.  */
3114
  df_chain_alloc,             /* Allocate the problem specific data.  */
3115
  df_chain_reset,             /* Reset global information.  */
3116
  NULL,                       /* Free basic block info.  */
3117
  NULL,                       /* Local compute function.  */
3118
  NULL,                       /* Init the solution specific data.  */
3119
  NULL,                       /* Iterative solver.  */
3120
  NULL,                       /* Confluence operator 0.  */
3121
  NULL,                       /* Confluence operator n.  */
3122
  NULL,                       /* Transfer function.  */
3123
  df_chain_finalize,          /* Finalize function.  */
3124
  df_chain_free,              /* Free all of the problem information.  */
3125
  df_chains_dump,             /* Debugging.  */
3126
  df_rd_add_problem,          /* Dependent problem.  */
3127
 
3128
};
3129
 
3130
 
3131
/* Create a new DATAFLOW instance and add it to an existing instance
3132
   of DF.  The returned structure is what is used to get at the
3133
   solution.  */
3134
 
3135
struct dataflow *
3136
df_chain_add_problem (struct df *df, int flags)
3137
{
3138
  return df_add_problem (df, &problem_CHAIN, flags);
3139
}
3140
 
3141
 
3142
/*----------------------------------------------------------------------------
3143
   REGISTER INFORMATION
3144
 
3145
   This pass properly computes REG_DEAD and REG_UNUSED notes.
3146
 
3147
   If the DF_RI_LIFE flag is set the following vectors containing
3148
   information about register usage are properly set: REG_N_REFS,
3149
   REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH, REG_N_CALLS_CROSSED,
3150
   REG_N_THROWING_CALLS_CROSSED and REG_BASIC_BLOCK.
3151
 
3152
   ----------------------------------------------------------------------------*/
3153
 
3154
#ifdef REG_DEAD_DEBUGGING
3155
static void
3156
print_note (char *prefix, rtx insn, rtx note)
3157
{
3158
  fprintf (stderr, "%s %d ", prefix, INSN_UID (insn));
3159
  print_rtl (stderr, note);
3160
  fprintf (stderr, "\n");
3161
}
3162
#endif
3163
 
3164
/* Allocate the lifetime information.  */
3165
 
3166
static void
3167
df_ri_alloc (struct dataflow *dflow,
3168
             bitmap blocks_to_rescan ATTRIBUTE_UNUSED,
3169
             bitmap all_blocks ATTRIBUTE_UNUSED)
3170
{
3171
  int i;
3172
  struct df *df = dflow->df;
3173
 
3174
  if (dflow->flags & DF_RI_LIFE)
3175
    {
3176
      max_regno = max_reg_num ();
3177
      allocate_reg_info (max_regno, FALSE, FALSE);
3178
 
3179
      /* Reset all the data we'll collect.  */
3180
      for (i = 0; i < max_regno; i++)
3181
        {
3182
          REG_N_SETS (i) = DF_REG_DEF_COUNT (df, i);
3183
          REG_N_REFS (i) = DF_REG_USE_COUNT (df, i) + REG_N_SETS (i);
3184
          REG_N_DEATHS (i) = 0;
3185
          REG_N_CALLS_CROSSED (i) = 0;
3186
          REG_N_THROWING_CALLS_CROSSED (i) = 0;
3187
          REG_LIVE_LENGTH (i) = 0;
3188
          REG_FREQ (i) = 0;
3189
          REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
3190
        }
3191
    }
3192
}
3193
 
3194
 
3195
/* After reg-stack, the x86 floating point stack regs are difficult to
3196
   analyze because of all of the pushes, pops and rotations.  Thus, we
3197
   just leave the notes alone. */
3198
 
3199
static inline bool
3200
df_ignore_stack_reg (int regno ATTRIBUTE_UNUSED)
3201
{
3202
#ifdef STACK_REGS
3203
  return (regstack_completed
3204
          && IN_RANGE (regno, FIRST_STACK_REG, LAST_STACK_REG));
3205
#else
3206
  return false;
3207
#endif
3208
}
3209
 
3210
 
3211
/* Remove all of the REG_DEAD or REG_UNUSED notes from INSN.  */
3212
 
3213
static void
3214
df_kill_notes (rtx insn, int flags)
3215
{
3216
  rtx *pprev = &REG_NOTES (insn);
3217
  rtx link = *pprev;
3218
 
3219
  while (link)
3220
    {
3221
      switch (REG_NOTE_KIND (link))
3222
        {
3223
        case REG_DEAD:
3224
          if (flags & DF_RI_LIFE)
3225
            if (df_ignore_stack_reg (REGNO (XEXP (link, 0))))
3226
              REG_N_DEATHS (REGNO (XEXP (link, 0)))++;
3227
 
3228
          /* Fallthru */
3229
        case REG_UNUSED:
3230
          if (!df_ignore_stack_reg (REGNO (XEXP (link, 0))))
3231
            {
3232
              rtx next = XEXP (link, 1);
3233
#ifdef REG_DEAD_DEBUGGING
3234
              print_note ("deleting: ", insn, link);
3235
#endif
3236
              free_EXPR_LIST_node (link);
3237
              *pprev = link = next;
3238
            }
3239
          break;
3240
 
3241
        default:
3242
          pprev = &XEXP (link, 1);
3243
          link = *pprev;
3244
          break;
3245
        }
3246
    }
3247
}
3248
 
3249
 
3250
/* Set the REG_UNUSED notes for the multiword hardreg defs in INSN
3251
   based on the bits in LIVE.  Do not generate notes for registers in
3252
   artificial uses.  DO_NOT_GEN is updated so that REG_DEAD notes are
3253
   not generated if the reg is both read and written by the
3254
   instruction.
3255
*/
3256
 
3257
static void
3258
df_set_unused_notes_for_mw (rtx insn, struct df_mw_hardreg *mws,
3259
                            bitmap live, bitmap do_not_gen,
3260
                            bitmap artificial_uses, int flags)
3261
{
3262
  bool all_dead = true;
3263
  struct df_link *regs = mws->regs;
3264
  unsigned int regno = DF_REF_REGNO (regs->ref);
3265
 
3266
#ifdef REG_DEAD_DEBUGGING
3267
  fprintf (stderr, "mw unused looking at %d\n", DF_REF_REGNO (regs->ref));
3268
  df_ref_debug (regs->ref, stderr);
3269
#endif
3270
  while (regs)
3271
    {
3272
      unsigned int regno = DF_REF_REGNO (regs->ref);
3273
      if ((bitmap_bit_p (live, regno))
3274
          || bitmap_bit_p (artificial_uses, regno))
3275
        {
3276
          all_dead = false;
3277
          break;
3278
        }
3279
      regs = regs->next;
3280
    }
3281
 
3282
  if (all_dead)
3283
    {
3284
      struct df_link *regs = mws->regs;
3285
      rtx note = alloc_EXPR_LIST (REG_UNUSED, *DF_REF_LOC (regs->ref),
3286
                                  REG_NOTES (insn));
3287
      REG_NOTES (insn) = note;
3288
#ifdef REG_DEAD_DEBUGGING
3289
      print_note ("adding 1: ", insn, note);
3290
#endif
3291
      bitmap_set_bit (do_not_gen, regno);
3292
      /* Only do this if the value is totally dead.  */
3293
      if (flags & DF_RI_LIFE)
3294
        {
3295
          REG_N_DEATHS (regno) ++;
3296
          REG_LIVE_LENGTH (regno)++;
3297
        }
3298
    }
3299
  else
3300
    {
3301
      struct df_link *regs = mws->regs;
3302
      while (regs)
3303
        {
3304
          struct df_ref *ref = regs->ref;
3305
 
3306
          regno = DF_REF_REGNO (ref);
3307
          if ((!bitmap_bit_p (live, regno))
3308
              && (!bitmap_bit_p (artificial_uses, regno)))
3309
            {
3310
              rtx note = alloc_EXPR_LIST (REG_UNUSED, regno_reg_rtx[regno],
3311
                                          REG_NOTES (insn));
3312
              REG_NOTES (insn) = note;
3313
#ifdef REG_DEAD_DEBUGGING
3314
              print_note ("adding 2: ", insn, note);
3315
#endif
3316
            }
3317
          bitmap_set_bit (do_not_gen, regno);
3318
          regs = regs->next;
3319
        }
3320
    }
3321
}
3322
 
3323
 
3324
/* Set the REG_DEAD notes for the multiword hardreg use in INSN based
3325
   on the bits in LIVE.  DO_NOT_GEN is used to keep REG_DEAD notes
3326
   from being set if the instruction both reads and writes the
3327
   register.  */
3328
 
3329
static void
3330
df_set_dead_notes_for_mw (rtx insn, struct df_mw_hardreg *mws,
3331
                          bitmap live, bitmap do_not_gen,
3332
                          bitmap artificial_uses, int flags)
3333
{
3334
  bool all_dead = true;
3335
  struct df_link *regs = mws->regs;
3336
  unsigned int regno = DF_REF_REGNO (regs->ref);
3337
 
3338
#ifdef REG_DEAD_DEBUGGING
3339
  fprintf (stderr, "mw looking at %d\n", DF_REF_REGNO (regs->ref));
3340
  df_ref_debug (regs->ref, stderr);
3341
#endif
3342
  while (regs)
3343
    {
3344
      unsigned int regno = DF_REF_REGNO (regs->ref);
3345
      if ((bitmap_bit_p (live, regno))
3346
          || bitmap_bit_p (artificial_uses, regno))
3347
        {
3348
          all_dead = false;
3349
          break;
3350
        }
3351
      regs = regs->next;
3352
    }
3353
 
3354
  if (all_dead)
3355
    {
3356
      if (!bitmap_bit_p (do_not_gen, regno))
3357
        {
3358
          /* Add a dead note for the entire multi word register.  */
3359
          struct df_link *regs = mws->regs;
3360
          rtx note = alloc_EXPR_LIST (REG_DEAD, *DF_REF_LOC (regs->ref),
3361
                                      REG_NOTES (insn));
3362
          REG_NOTES (insn) = note;
3363
#ifdef REG_DEAD_DEBUGGING
3364
          print_note ("adding 1: ", insn, note);
3365
#endif
3366
 
3367
          if (flags & DF_RI_LIFE)
3368
            {
3369
              struct df_link *regs = mws->regs;
3370
              while (regs)
3371
                {
3372
                  struct df_ref *ref = regs->ref;
3373
                  regno = DF_REF_REGNO (ref);
3374
                  REG_N_DEATHS (regno)++;
3375
                  regs = regs->next;
3376
                }
3377
            }
3378
        }
3379
    }
3380
  else
3381
    {
3382
      struct df_link *regs = mws->regs;
3383
      while (regs)
3384
        {
3385
          struct df_ref *ref = regs->ref;
3386
 
3387
          regno = DF_REF_REGNO (ref);
3388
          if ((!bitmap_bit_p (live, regno))
3389
              && (!bitmap_bit_p (artificial_uses, regno))
3390
              && (!bitmap_bit_p (do_not_gen, regno)))
3391
            {
3392
              rtx note = alloc_EXPR_LIST (REG_DEAD, regno_reg_rtx[regno],
3393
                                          REG_NOTES (insn));
3394
              REG_NOTES (insn) = note;
3395
              if (flags & DF_RI_LIFE)
3396
                REG_N_DEATHS (regno)++;
3397
#ifdef REG_DEAD_DEBUGGING
3398
              print_note ("adding 2: ", insn, note);
3399
#endif
3400
            }
3401
 
3402
          regs = regs->next;
3403
        }
3404
    }
3405
}
3406
 
3407
 
3408
/* Create a REG_UNUSED note if necessary for DEF in INSN, updating LIVE
3409
   and DO_NOT_GEN.  Do not generate notes for registers in artificial
3410
   uses.  */
3411
 
3412
static void
3413
df_create_unused_note (basic_block bb, rtx insn, struct df_ref *def,
3414
                       bitmap live, bitmap do_not_gen, bitmap artificial_uses,
3415
                       bitmap local_live, bitmap local_processed,
3416
                       int flags, int luid)
3417
{
3418
  unsigned int dregno = DF_REF_REGNO (def);
3419
 
3420
#ifdef REG_DEAD_DEBUGGING
3421
  fprintf (stderr, "  regular looking at def ");
3422
  df_ref_debug (def, stderr);
3423
#endif
3424
 
3425
  if (bitmap_bit_p (live, dregno))
3426
    {
3427
      if (flags & DF_RI_LIFE)
3428
        {
3429
          /* If we have seen this regno, then it has already been
3430
             processed correctly with the per insn increment.  If we
3431
             have not seen it we need to add the length from here to
3432
             the end of the block to the live length.  */
3433
          if (bitmap_bit_p (local_processed, dregno))
3434
            {
3435
              if (!(DF_REF_FLAGS (def) & DF_REF_PARTIAL))
3436
                bitmap_clear_bit (local_live, dregno);
3437
            }
3438
          else
3439
            {
3440
              bitmap_set_bit (local_processed, dregno);
3441
              REG_LIVE_LENGTH (dregno) += luid;
3442
            }
3443
        }
3444
    }
3445
  else if ((!(DF_REF_FLAGS (def) & DF_REF_MW_HARDREG))
3446
            && (!bitmap_bit_p (artificial_uses, dregno))
3447
            && (!df_ignore_stack_reg (dregno)))
3448
    {
3449
      rtx reg = GET_CODE (*DF_REF_LOC (def)) == SUBREG ?
3450
        SUBREG_REG (*DF_REF_LOC (def)) : *DF_REF_LOC (def);
3451
      rtx note = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
3452
      REG_NOTES (insn) = note;
3453
#ifdef REG_DEAD_DEBUGGING
3454
      print_note ("adding 3: ", insn, note);
3455
#endif
3456
      if (flags & DF_RI_LIFE)
3457
        {
3458
          REG_N_DEATHS (dregno) ++;
3459
          REG_LIVE_LENGTH (dregno)++;
3460
        }
3461
    }
3462
 
3463
  if ((flags & DF_RI_LIFE) && (dregno >= FIRST_PSEUDO_REGISTER))
3464
    {
3465
      REG_FREQ (dregno) += REG_FREQ_FROM_BB (bb);
3466
      if (REG_BASIC_BLOCK (dregno) == REG_BLOCK_UNKNOWN)
3467
        REG_BASIC_BLOCK (dregno) = bb->index;
3468
      else if (REG_BASIC_BLOCK (dregno) != bb->index)
3469
        REG_BASIC_BLOCK (dregno) = REG_BLOCK_GLOBAL;
3470
    }
3471
 
3472
  if (!(DF_REF_FLAGS (def) & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
3473
    bitmap_set_bit (do_not_gen, dregno);
3474
 
3475
  /* Kill this register if it is not a subreg store.  */
3476
  if (!(DF_REF_FLAGS (def) & DF_REF_PARTIAL))
3477
    bitmap_clear_bit (live, dregno);
3478
}
3479
 
3480
 
3481
/* Recompute the REG_DEAD and REG_UNUSED notes and compute register
3482
   info: lifetime, bb, and number of defs and uses for basic block
3483
   BB.  The bitmaps passed in are scratch temporaries used here.  */
3484
 
3485
static void
3486
df_ri_bb_compute (struct dataflow *dflow, unsigned int bb_index,
3487
                  bitmap live, bitmap do_not_gen, bitmap artificial_uses,
3488
                  bitmap local_live, bitmap local_processed, bitmap setjumps_crossed)
3489
{
3490
  struct df *df = dflow->df;
3491
  basic_block bb = BASIC_BLOCK (bb_index);
3492
  rtx insn;
3493
  struct df_ref *def;
3494
  struct df_ref *use;
3495
  int luid = 0;
3496
 
3497
  bitmap_copy (live, df_get_live_out (df, bb));
3498
  bitmap_clear (artificial_uses);
3499
 
3500
  if (dflow->flags & DF_RI_LIFE)
3501
    {
3502
      /* Process the regs live at the end of the block.  Mark them as
3503
         not local to any one basic block.  */
3504
      bitmap_iterator bi;
3505
      unsigned int regno;
3506
      EXECUTE_IF_SET_IN_BITMAP (live, 0, regno, bi)
3507
        REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
3508
    }
3509
 
3510
  /* Process the artificial defs and uses at the bottom of the block
3511
     to begin processing.  */
3512
  for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
3513
    if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
3514
      bitmap_clear_bit (live, DF_REF_REGNO (def));
3515
 
3516
  for (use = df_get_artificial_uses (df, bb_index); use; use = use->next_ref)
3517
    if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
3518
      {
3519
        unsigned int regno = DF_REF_REGNO (use);
3520
        bitmap_set_bit (live, regno);
3521
 
3522
        /* Notes are not generated for any of the artificial registers
3523
           at the bottom of the block.  */
3524
        bitmap_set_bit (artificial_uses, regno);
3525
      }
3526
 
3527
  FOR_BB_INSNS_REVERSE (bb, insn)
3528
    {
3529
      unsigned int uid = INSN_UID (insn);
3530
      unsigned int regno;
3531
      bitmap_iterator bi;
3532
      struct df_mw_hardreg *mws;
3533
 
3534
      if (!INSN_P (insn))
3535
        continue;
3536
 
3537
      if (dflow->flags & DF_RI_LIFE)
3538
        {
3539
          /* Increment the live_length for all of the registers that
3540
             are referenced in this block and live at this
3541
             particular point.  */
3542
          bitmap_iterator bi;
3543
          unsigned int regno;
3544
          EXECUTE_IF_SET_IN_BITMAP (local_live, 0, regno, bi)
3545
            {
3546
              REG_LIVE_LENGTH (regno)++;
3547
            }
3548
          luid++;
3549
        }
3550
 
3551
      bitmap_clear (do_not_gen);
3552
      df_kill_notes (insn, dflow->flags);
3553
 
3554
      /* Process the defs.  */
3555
      if (CALL_P (insn))
3556
        {
3557
          if (dflow->flags & DF_RI_LIFE)
3558
            {
3559
              bool can_throw = can_throw_internal (insn);
3560
              bool set_jump = (find_reg_note (insn, REG_SETJMP, NULL) != NULL);
3561
              EXECUTE_IF_SET_IN_BITMAP (live, 0, regno, bi)
3562
                {
3563
                  REG_N_CALLS_CROSSED (regno)++;
3564
                  if (can_throw)
3565
                    REG_N_THROWING_CALLS_CROSSED (regno)++;
3566
 
3567
                  /* We have a problem with any pseudoreg that lives
3568
                     across the setjmp.  ANSI says that if a user
3569
                     variable does not change in value between the
3570
                     setjmp and the longjmp, then the longjmp
3571
                     preserves it.  This includes longjmp from a place
3572
                     where the pseudo appears dead.  (In principle,
3573
                     the value still exists if it is in scope.)  If
3574
                     the pseudo goes in a hard reg, some other value
3575
                     may occupy that hard reg where this pseudo is
3576
                     dead, thus clobbering the pseudo.  Conclusion:
3577
                     such a pseudo must not go in a hard reg.  */
3578
                  if (set_jump && regno >= FIRST_PSEUDO_REGISTER)
3579
                    bitmap_set_bit (setjumps_crossed, regno);
3580
                }
3581
            }
3582
 
3583
          /* We only care about real sets for calls.  A clobber may or
3584
             may not actually clobber the register, so it cannot be depended on.  */
3585
          for (mws = DF_INSN_UID_MWS (df, uid); mws; mws = mws->next)
3586
            {
3587
              if ((mws->type == DF_REF_REG_DEF)
3588
                  && !df_ignore_stack_reg (REGNO (mws->mw_reg)))
3589
                df_set_unused_notes_for_mw (insn, mws, live, do_not_gen,
3590
                                            artificial_uses, dflow->flags);
3591
            }
3592
 
3593
          /* All of the defs except the return value are some sort of
3594
             clobber.  This code is for the return.  */
3595
          for (def = DF_INSN_UID_DEFS (df, uid); def; def = def->next_ref)
3596
            if (!(DF_REF_FLAGS (def) & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
3597
              df_create_unused_note (bb, insn, def, live, do_not_gen,
3598
                                     artificial_uses, local_live,
3599
                                     local_processed, dflow->flags, luid);
3600
 
3601
        }
3602
      else
3603
        {
3604
          /* Regular insn.  */
3605
          for (mws = DF_INSN_UID_MWS (df, uid); mws; mws = mws->next)
3606
            {
3607
              if (mws->type == DF_REF_REG_DEF)
3608
                df_set_unused_notes_for_mw (insn, mws, live, do_not_gen,
3609
                                            artificial_uses, dflow->flags);
3610
            }
3611
 
3612
          for (def = DF_INSN_UID_DEFS (df, uid); def; def = def->next_ref)
3613
            df_create_unused_note (bb, insn, def, live, do_not_gen,
3614
                                   artificial_uses, local_live,
3615
                                   local_processed, dflow->flags, luid);
3616
        }
3617
 
3618
      /* Process the uses.  */
3619
      for (mws = DF_INSN_UID_MWS (df, uid); mws; mws = mws->next)
3620
        {
3621
          if ((mws->type != DF_REF_REG_DEF)
3622
              && !df_ignore_stack_reg (REGNO (mws->mw_reg)))
3623
            df_set_dead_notes_for_mw (insn, mws, live, do_not_gen,
3624
                                      artificial_uses, dflow->flags);
3625
        }
3626
 
3627
      for (use = DF_INSN_UID_USES (df, uid); use; use = use->next_ref)
3628
        {
3629
          unsigned int uregno = DF_REF_REGNO (use);
3630
 
3631
          if ((dflow->flags & DF_RI_LIFE) && (uregno >= FIRST_PSEUDO_REGISTER))
3632
            {
3633
              REG_FREQ (uregno) += REG_FREQ_FROM_BB (bb);
3634
              if (REG_BASIC_BLOCK (uregno) == REG_BLOCK_UNKNOWN)
3635
                REG_BASIC_BLOCK (uregno) = bb->index;
3636
              else if (REG_BASIC_BLOCK (uregno) != bb->index)
3637
                REG_BASIC_BLOCK (uregno) = REG_BLOCK_GLOBAL;
3638
            }
3639
 
3640
#ifdef REG_DEAD_DEBUGGING
3641
          fprintf (stderr, "  regular looking at use ");
3642
          df_ref_debug (use, stderr);
3643
#endif
3644
          if (!bitmap_bit_p (live, uregno))
3645
            {
3646
              if ( (!(DF_REF_FLAGS (use) & DF_REF_MW_HARDREG))
3647
                   && (!bitmap_bit_p (do_not_gen, uregno))
3648
                   && (!bitmap_bit_p (artificial_uses, uregno))
3649
                   && (!(DF_REF_FLAGS (use) & DF_REF_READ_WRITE))
3650
                   && (!df_ignore_stack_reg (uregno)))
3651
                {
3652
                  rtx reg = GET_CODE (*DF_REF_LOC (use)) == SUBREG ?
3653
                    SUBREG_REG (*DF_REF_LOC (use)) : *DF_REF_LOC (use);
3654
                  rtx note = alloc_EXPR_LIST (REG_DEAD, reg, REG_NOTES (insn));
3655
                  REG_NOTES (insn) = note;
3656
                  if (dflow->flags & DF_RI_LIFE)
3657
                    REG_N_DEATHS (uregno)++;
3658
 
3659
#ifdef REG_DEAD_DEBUGGING
3660
                  print_note ("adding 4: ", insn, note);
3661
#endif
3662
                }
3663
              /* This register is now live.  */
3664
              bitmap_set_bit (live, uregno);
3665
 
3666
              if (dflow->flags & DF_RI_LIFE)
3667
                {
3668
                  /* If we have seen this regno, then it has already
3669
                     been processed correctly with the per insn
3670
                     increment.  If we have not seen it we set the bit
3671
                     so that it begins to get processed locally.  Note
3672
                     that we don't even get here if the variable was
3673
                     live at the end of the block since just a ref
3674
                     inside the block does not affect the
3675
                     calculations.  */
3676
                  REG_LIVE_LENGTH (uregno) ++;
3677
                  bitmap_set_bit (local_live, uregno);
3678
                  bitmap_set_bit (local_processed, uregno);
3679
                }
3680
            }
3681
        }
3682
    }
3683
 
3684
  if (dflow->flags & DF_RI_LIFE)
3685
    {
3686
      /* Add the length of the block to all of the registers that were
3687
         not referenced, but still live in this block.  */
3688
      bitmap_iterator bi;
3689
      unsigned int regno;
3690
      bitmap_and_compl_into (live, local_processed);
3691
      EXECUTE_IF_SET_IN_BITMAP (live, 0, regno, bi)
3692
        {
3693
          REG_LIVE_LENGTH (regno) += luid;
3694
        }
3695
      bitmap_clear (local_processed);
3696
      bitmap_clear (local_live);
3697
    }
3698
}
3699
 
3700
 
3701
/* Compute register info: lifetime, bb, and number of defs and uses.  */
3702
static void
3703
df_ri_compute (struct dataflow *dflow, bitmap all_blocks ATTRIBUTE_UNUSED,
3704
               bitmap blocks_to_scan)
3705
{
3706
  unsigned int bb_index;
3707
  bitmap_iterator bi;
3708
  bitmap live = BITMAP_ALLOC (NULL);
3709
  bitmap do_not_gen = BITMAP_ALLOC (NULL);
3710
  bitmap artificial_uses = BITMAP_ALLOC (NULL);
3711
  bitmap local_live = NULL;
3712
  bitmap local_processed = NULL;
3713
  bitmap setjumps_crossed = NULL;
3714
 
3715
  if (dflow->flags & DF_RI_LIFE)
3716
    {
3717
      local_live = BITMAP_ALLOC (NULL);
3718
      local_processed = BITMAP_ALLOC (NULL);
3719
      setjumps_crossed = BITMAP_ALLOC (NULL);
3720
    }
3721
 
3722
 
3723
#ifdef REG_DEAD_DEBUGGING
3724
  df_lr_dump (dflow->df->problems_by_index [DF_LR], stderr);
3725
  print_rtl_with_bb (stderr, get_insns());
3726
#endif
3727
 
3728
  EXECUTE_IF_SET_IN_BITMAP (blocks_to_scan, 0, bb_index, bi)
3729
  {
3730
    df_ri_bb_compute (dflow, bb_index, live, do_not_gen, artificial_uses,
3731
                      local_live, local_processed, setjumps_crossed);
3732
  }
3733
 
3734
  BITMAP_FREE (live);
3735
  BITMAP_FREE (do_not_gen);
3736
  BITMAP_FREE (artificial_uses);
3737
  if (dflow->flags & DF_RI_LIFE)
3738
    {
3739
      bitmap_iterator bi;
3740
      unsigned int regno;
3741
      /* See the setjump comment in df_ri_bb_compute.  */
3742
      EXECUTE_IF_SET_IN_BITMAP (setjumps_crossed, 0, regno, bi)
3743
        {
3744
          REG_BASIC_BLOCK (regno) = REG_BLOCK_UNKNOWN;
3745
          REG_LIVE_LENGTH (regno) = -1;
3746
        }
3747
 
3748
      BITMAP_FREE (local_live);
3749
      BITMAP_FREE (local_processed);
3750
      BITMAP_FREE (setjumps_crossed);
3751
    }
3752
}
3753
 
3754
 
3755
/* Free all storage associated with the problem.  */
3756
 
3757
static void
3758
df_ri_free (struct dataflow *dflow)
3759
{
3760
  free (dflow->problem_data);
3761
  free (dflow);
3762
}
3763
 
3764
 
3765
/* Debugging info.  */
3766
 
3767
static void
3768
df_ri_dump (struct dataflow *dflow, FILE *file)
3769
{
3770
  print_rtl_with_bb (file, get_insns ());
3771
 
3772
  if (dflow->flags & DF_RI_LIFE)
3773
    {
3774
      fprintf (file, "Register info:\n");
3775
      dump_flow_info (file, -1);
3776
    }
3777
}
3778
 
3779
/* All of the information associated with every instance of the problem.  */
3780
 
3781
static struct df_problem problem_RI =
3782
{
3783
  DF_RI,                      /* Problem id.  */
3784
  DF_NONE,                    /* Direction.  */
3785
  df_ri_alloc,                /* Allocate the problem specific data.  */
3786
  NULL,                       /* Reset global information.  */
3787
  NULL,                       /* Free basic block info.  */
3788
  df_ri_compute,              /* Local compute function.  */
3789
  NULL,                       /* Init the solution specific data.  */
3790
  NULL,                       /* Iterative solver.  */
3791
  NULL,                       /* Confluence operator 0.  */
3792
  NULL,                       /* Confluence operator n.  */
3793
  NULL,                       /* Transfer function.  */
3794
  NULL,                       /* Finalize function.  */
3795
  df_ri_free,                 /* Free all of the problem information.  */
3796
  df_ri_dump,                 /* Debugging.  */
3797
 
3798
  /* Technically this is only dependent on the live registers problem,
3799
     but it will produce better information if one of the uninitialized
3800
     register problems (UR, UREC) is also run.  */
3801
  df_lr_add_problem,          /* Dependent problem.  */
3802
 
3803
};
3804
 
3805
 
3806
/* Create a new DATAFLOW instance and add it to an existing instance
3807
   of DF.  The returned structure is what is used to get at the
3808
   solution.  */
3809
 
3810
struct dataflow *
3811
df_ri_add_problem (struct df *df, int flags)
3812
{
3813
  return df_add_problem (df, &problem_RI, flags);
3814
}
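/* Illustrative note, not part of the original file.  A client that wants
   fresh REG_DEAD/REG_UNUSED notes plus the usage vectors described above
   would, given an already created struct df *df (its construction and the
   later call that actually runs the analysis are assumed and not shown),
   simply request the problem:

     struct dataflow *ri = df_ri_add_problem (df, DF_RI_LIFE);

   Because problem_RI names df_lr_add_problem as its dependent problem,
   the live-registers (LR) analysis it relies on is added for it; passing
   0 instead of DF_RI_LIFE recomputes only the notes and leaves the
   REG_N_* vectors alone.  */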
