OpenCores
URL https://opencores.org/ocsvn/openrisc/openrisc/trunk

Subversion Repositories openrisc

[/] [openrisc/] [trunk/] [gnu-dev/] [or1k-gcc/] [gcc/] [df-scan.c] - Blame information for rev 774

Go to most recent revision | Details | Compare with Previous | View Log

Line No. Rev Author Line
1 684 jeremybenn
/* Scanning of rtl for dataflow analysis.
2
   Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007,
3
   2008, 2009, 2010, 2011, 2012 Free Software Foundation, Inc.
4
   Originally contributed by Michael P. Hayes
5
             (m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
6
   Major rewrite contributed by Danny Berlin (dberlin@dberlin.org)
7
             and Kenneth Zadeck (zadeck@naturalbridge.com).
8
 
9
This file is part of GCC.
10
 
11
GCC is free software; you can redistribute it and/or modify it under
12
the terms of the GNU General Public License as published by the Free
13
Software Foundation; either version 3, or (at your option) any later
14
version.
15
 
16
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17
WARRANTY; without even the implied warranty of MERCHANTABILITY or
18
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
19
for more details.
20
 
21
You should have received a copy of the GNU General Public License
22
along with GCC; see the file COPYING3.  If not see
23
<http://www.gnu.org/licenses/>.  */
24
 
25
#include "config.h"
26
#include "system.h"
27
#include "coretypes.h"
28
#include "tm.h"
29
#include "rtl.h"
30
#include "tm_p.h"
31
#include "insn-config.h"
32
#include "recog.h"
33
#include "function.h"
34
#include "regs.h"
35
#include "output.h"
36
#include "alloc-pool.h"
37
#include "flags.h"
38
#include "hard-reg-set.h"
39
#include "basic-block.h"
40
#include "sbitmap.h"
41
#include "bitmap.h"
42
#include "timevar.h"
43
#include "tree.h"
44
#include "target.h"
45
#include "target-def.h"
46
#include "df.h"
47
#include "tree-pass.h"
48
#include "emit-rtl.h"  /* FIXME: Can go away once crtl is moved to rtl.h.  */
49
 
50
/* Growable vectors of refs and of multiword-hardreg pointers.  The
   *_STACK variants presumably allocate from the stack obstack since
   these vectors are short-lived scratch used while scanning one insn
   -- TODO confirm against vec.h.  */
DEF_VEC_P(df_ref);
DEF_VEC_ALLOC_P_STACK(df_ref);

#define VEC_df_ref_stack_alloc(alloc) VEC_stack_alloc (df_ref, alloc)

typedef struct df_mw_hardreg *df_mw_hardreg_ptr;

DEF_VEC_P(df_mw_hardreg_ptr);
DEF_VEC_ALLOC_P_STACK(df_mw_hardreg_ptr);

#define VEC_df_mw_hardreg_ptr_stack_alloc(alloc) \
  VEC_stack_alloc (df_mw_hardreg_ptr, alloc)

/* Fallbacks for targets whose .md files do not define prologue /
   epilogue / sibcall_epilogue patterns; treat the pattern as absent.  */
#ifndef HAVE_epilogue
#define HAVE_epilogue 0
#endif
#ifndef HAVE_prologue
#define HAVE_prologue 0
#endif
#ifndef HAVE_sibcall_epilogue
#define HAVE_sibcall_epilogue 0
#endif

/* Fallback for targets that do not mark any registers as used by the
   epilogue.  */
#ifndef EPILOGUE_USES
#define EPILOGUE_USES(REGNO)  0
#endif
76
 
77
/* The following two macros free the vecs that hold either the refs or
   the mw refs.  They are a little tricky because a vec with 0
   elements is a shared sentinel and must not be freed; only free when
   the first element (*V) is non-null.  */
#define df_scan_free_ref_vec(V) \
  do { \
    if (V && *V) \
      free (V);  \
  } while (0)

#define df_scan_free_mws_vec(V) \
  do { \
    if (V && *V) \
      free (V);  \
  } while (0)

/* The set of hard registers in eliminables[i].from. */

static HARD_REG_SET elim_reg_set;
95
 
96
/* Scratch record holding the refs collected for one insn (or one
   block's artificial refs) before they are canonized and installed
   into the permanent df structures.  (The old "Initialize ur_in and
   ur_out" comment here was a stale leftover and has been replaced.)  */

struct df_collection_rec
{
  VEC(df_ref,stack) *def_vec;      /* Defs found in the insn.  */
  VEC(df_ref,stack) *use_vec;      /* Uses found in the pattern.  */
  VEC(df_ref,stack) *eq_use_vec;   /* Uses found in REG_EQUAL/EQUIV notes.  */
  VEC(df_mw_hardreg_ptr,stack) *mw_vec;  /* Multiword hardreg records.  */
};

/* Shared empty (NULL-terminated) chains, used so that "no refs" does
   not require a heap allocation (see df_scan_free_ref_vec above).  */
static df_ref df_null_ref_rec[1];
static struct df_mw_hardreg * df_null_mw_rec[1];
109
 
110
/* Forward declarations for the static helpers of this file.  */

static void df_ref_record (enum df_ref_class, struct df_collection_rec *,
                           rtx, rtx *,
                           basic_block, struct df_insn_info *,
                           enum df_ref_type, int ref_flags);
static void df_def_record_1 (struct df_collection_rec *, rtx,
                             basic_block, struct df_insn_info *,
                             int ref_flags);
static void df_defs_record (struct df_collection_rec *, rtx,
                            basic_block, struct df_insn_info *,
                            int ref_flags);
static void df_uses_record (struct df_collection_rec *,
                            rtx *, enum df_ref_type,
                            basic_block, struct df_insn_info *,
                            int ref_flags);

static void df_install_ref_incremental (df_ref);
static df_ref df_ref_create_structure (enum df_ref_class,
                                       struct df_collection_rec *, rtx, rtx *,
                                       basic_block, struct df_insn_info *,
                                       enum df_ref_type, int ref_flags);
static void df_insn_refs_collect (struct df_collection_rec*,
                                  basic_block, struct df_insn_info *);
static void df_canonize_collection_rec (struct df_collection_rec *);

static void df_get_regular_block_artificial_uses (bitmap);
static void df_get_eh_block_artificial_uses (bitmap);

static void df_record_entry_block_defs (bitmap);
static void df_record_exit_block_uses (bitmap);
static void df_get_exit_block_use_set (bitmap);
static void df_get_entry_block_def_set (bitmap);
static void df_grow_ref_info (struct df_ref_info *, unsigned int);
static void df_ref_chain_delete_du_chain (df_ref *);
static void df_ref_chain_delete (df_ref *);

static void df_refs_add_to_chains (struct df_collection_rec *,
                                   basic_block, rtx);

static bool df_insn_refs_verify (struct df_collection_rec *, basic_block, rtx, bool);
static void df_entry_block_defs_collect (struct df_collection_rec *, bitmap);
static void df_exit_block_uses_collect (struct df_collection_rec *, bitmap);
static void df_install_ref (df_ref, struct df_reg_info *,
                            struct df_ref_info *, bool);

/* qsort comparators used when canonizing collection records.  */
static int df_ref_compare (const void *, const void *);
static int df_mw_compare (const void *, const void *);
156
 
157
/* Indexed by hardware reg number, is true if that register is ever
   used in the current function.

   In df-scan.c, this is set up to record the hard regs used
   explicitly.  Reload adds in the hard regs used for holding pseudo
   regs.  Final uses it to generate the code in the function prologue
   and epilogue to save and restore registers as needed.  */

static bool regs_ever_live[FIRST_PSEUDO_REGISTER];
166
 
167
/*----------------------------------------------------------------------------
   SCANNING DATAFLOW PROBLEM

   There are several ways in which scanning looks just like the other
   dataflow problems.  It shares the all the mechanisms for local info
   as well as basic block info.  Where it differs is when and how often
   it gets run.  It also has no need for the iterative solver.
----------------------------------------------------------------------------*/

/* Problem data for the scanning dataflow function.  Holds one
   allocation pool per kind of record created by scanning, plus the
   obstacks backing the reg- and insn-indexed bitmaps.  */
struct df_scan_problem_data
{
  alloc_pool ref_base_pool;        /* DF_REF_BASE refs.  */
  alloc_pool ref_artificial_pool;  /* DF_REF_ARTIFICIAL refs.  */
  alloc_pool ref_regular_pool;     /* DF_REF_REGULAR refs.  */
  alloc_pool insn_pool;            /* struct df_insn_info.  */
  alloc_pool reg_pool;             /* struct df_reg_info.  */
  alloc_pool mw_reg_pool;          /* struct df_mw_hardreg.  */
  bitmap_obstack reg_bitmaps;      /* Backing for register bitmaps.  */
  bitmap_obstack insn_bitmaps;     /* Backing for insn-indexed bitmaps.  */
};

typedef struct df_scan_bb_info *df_scan_bb_info_t;
190
 
191
 
192
/* Internal function to shut down the scanning problem.  Frees every
   per-insn and per-block ref vector individually, then releases the
   ref tables, reg/insn arrays, bitmaps and finally the allocation
   pools themselves.  */
static void
df_scan_free_internal (void)
{
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;
  unsigned int i;
  basic_block bb;

  /* The vectors that hold the refs are not pool allocated because
     they come in many sizes.  This makes them impossible to delete
     all at once.  */
  for (i = 0; i < DF_INSN_SIZE(); i++)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_GET(i);
      /* Skip the insns that have no insn_info or have been
         deleted.  */
      if (insn_info)
        {
          df_scan_free_ref_vec (insn_info->defs);
          df_scan_free_ref_vec (insn_info->uses);
          df_scan_free_ref_vec (insn_info->eq_uses);
          df_scan_free_mws_vec (insn_info->mw_hardregs);
        }
    }

  /* FOR_ALL_BB also covers the entry/exit blocks, which carry
     artificial refs.  */
  FOR_ALL_BB (bb)
    {
      unsigned int bb_index = bb->index;
      struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb_index);
      if (bb_info)
        {
          df_scan_free_ref_vec (bb_info->artificial_defs);
          df_scan_free_ref_vec (bb_info->artificial_uses);
        }
    }

  /* Release the flat def/use ref tables and zero the containing
     structs so a later df_scan_alloc starts from a clean state.  */
  free (df->def_info.refs);
  free (df->def_info.begin);
  free (df->def_info.count);
  memset (&df->def_info, 0, (sizeof (struct df_ref_info)));

  free (df->use_info.refs);
  free (df->use_info.begin);
  free (df->use_info.count);
  memset (&df->use_info, 0, (sizeof (struct df_ref_info)));

  free (df->def_regs);
  df->def_regs = NULL;
  free (df->use_regs);
  df->use_regs = NULL;
  free (df->eq_use_regs);
  df->eq_use_regs = NULL;
  df->regs_size = 0;
  DF_REG_SIZE(df) = 0;

  free (df->insns);
  df->insns = NULL;
  DF_INSN_SIZE () = 0;

  free (df_scan->block_info);
  df_scan->block_info = NULL;
  df_scan->block_info_size = 0;

  /* These bitmaps live on the problem's obstacks; clear them before
     the obstacks are released below.  */
  bitmap_clear (&df->hardware_regs_used);
  bitmap_clear (&df->regular_block_artificial_uses);
  bitmap_clear (&df->eh_block_artificial_uses);
  BITMAP_FREE (df->entry_block_defs);
  BITMAP_FREE (df->exit_block_uses);
  bitmap_clear (&df->insns_to_delete);
  bitmap_clear (&df->insns_to_rescan);
  bitmap_clear (&df->insns_to_notes_rescan);

  /* Finally drop the pools and obstacks; this reclaims all the refs,
     insn_infos and reg_infos in bulk.  */
  free_alloc_pool (problem_data->ref_base_pool);
  free_alloc_pool (problem_data->ref_artificial_pool);
  free_alloc_pool (problem_data->ref_regular_pool);
  free_alloc_pool (problem_data->insn_pool);
  free_alloc_pool (problem_data->reg_pool);
  free_alloc_pool (problem_data->mw_reg_pool);
  bitmap_obstack_release (&problem_data->reg_bitmaps);
  bitmap_obstack_release (&problem_data->insn_bitmaps);
  free (df_scan->problem_data);
}
275
 
276
 
277
/* Free basic block info.  VBB_INFO is BB's df_scan per-block data,
   passed via the problem's free_bb_info hook.  Deletes the insn refs
   of every insn in BB and then BB's artificial defs/uses.  */

static void
df_scan_free_bb_info (basic_block bb, void *vbb_info)
{
  struct df_scan_bb_info *bb_info = (struct df_scan_bb_info *) vbb_info;
  unsigned int bb_index = bb->index;

  /* See if bb_info is initialized.  */
  if (bb_info->artificial_defs)
    {
      rtx insn;
      FOR_BB_INSNS (bb, insn)
        {
          if (INSN_P (insn))
            /* Record defs within INSN.  */
            df_insn_delete (bb, INSN_UID (insn));
        }

      /* NOTE(review): bb_info is re-fetched here, presumably because
         df_insn_delete above can reallocate/move the block_info
         array -- TODO confirm.  The index check guards against the
         array having shrunk.  */
      if (bb_index < df_scan->block_info_size)
        bb_info = df_scan_get_bb_info (bb_index);

      /* Get rid of any artificial uses or defs.  */
      if (bb_info->artificial_defs)
        {
          df_ref_chain_delete_du_chain (bb_info->artificial_defs);
          df_ref_chain_delete_du_chain (bb_info->artificial_uses);
          df_ref_chain_delete (bb_info->artificial_defs);
          df_ref_chain_delete (bb_info->artificial_uses);
          bb_info->artificial_defs = NULL;
          bb_info->artificial_uses = NULL;
        }
    }
}
311
 
312
 
313
/* Allocate the problem data for the scanning problem.  This should be
314
   called when the problem is created or when the entire function is to
315
   be rescanned.  */
316
void
317
df_scan_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
318
{
319
  struct df_scan_problem_data *problem_data;
320
  unsigned int insn_num = get_max_uid () + 1;
321
  unsigned int block_size = 400;
322
  basic_block bb;
323
 
324
  /* Given the number of pools, this is really faster than tearing
325
     everything apart.  */
326
  if (df_scan->problem_data)
327
    df_scan_free_internal ();
328
 
329
  problem_data = XNEW (struct df_scan_problem_data);
330
  df_scan->problem_data = problem_data;
331
  df_scan->computed = true;
332
 
333
  problem_data->ref_base_pool
334
    = create_alloc_pool ("df_scan ref base",
335
                         sizeof (struct df_base_ref), block_size);
336
  problem_data->ref_artificial_pool
337
    = create_alloc_pool ("df_scan ref artificial",
338
                         sizeof (struct df_artificial_ref), block_size);
339
  problem_data->ref_regular_pool
340
    = create_alloc_pool ("df_scan ref regular",
341
                         sizeof (struct df_regular_ref), block_size);
342
  problem_data->insn_pool
343
    = create_alloc_pool ("df_scan insn",
344
                         sizeof (struct df_insn_info), block_size);
345
  problem_data->reg_pool
346
    = create_alloc_pool ("df_scan reg",
347
                         sizeof (struct df_reg_info), block_size);
348
  problem_data->mw_reg_pool
349
    = create_alloc_pool ("df_scan mw_reg",
350
                         sizeof (struct df_mw_hardreg), block_size);
351
 
352
  bitmap_obstack_initialize (&problem_data->reg_bitmaps);
353
  bitmap_obstack_initialize (&problem_data->insn_bitmaps);
354
 
355
  insn_num += insn_num / 4;
356
  df_grow_reg_info ();
357
 
358
  df_grow_insn_info ();
359
  df_grow_bb_info (df_scan);
360
 
361
  FOR_ALL_BB (bb)
362
    {
363
      unsigned int bb_index = bb->index;
364
      struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb_index);
365
      bb_info->artificial_defs = NULL;
366
      bb_info->artificial_uses = NULL;
367
    }
368
 
369
  bitmap_initialize (&df->hardware_regs_used, &problem_data->reg_bitmaps);
370
  bitmap_initialize (&df->regular_block_artificial_uses, &problem_data->reg_bitmaps);
371
  bitmap_initialize (&df->eh_block_artificial_uses, &problem_data->reg_bitmaps);
372
  df->entry_block_defs = BITMAP_ALLOC (&problem_data->reg_bitmaps);
373
  df->exit_block_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
374
  bitmap_initialize (&df->insns_to_delete, &problem_data->insn_bitmaps);
375
  bitmap_initialize (&df->insns_to_rescan, &problem_data->insn_bitmaps);
376
  bitmap_initialize (&df->insns_to_notes_rescan, &problem_data->insn_bitmaps);
377
  df_scan->optional_p = false;
378
}
379
 
380
 
381
/* Free all of the data associated with the scan problem.  */
382
 
383
static void
384
df_scan_free (void)
385
{
386
  if (df_scan->problem_data)
387
    df_scan_free_internal ();
388
 
389
  if (df->blocks_to_analyze)
390
    {
391
      BITMAP_FREE (df->blocks_to_analyze);
392
      df->blocks_to_analyze = NULL;
393
    }
394
 
395
  free (df_scan);
396
}
397
 
398
/* Dump the preamble for DF_SCAN dump.  Prints the global register
   sets, the ever-live hard regs, per-register def/use/eq-use counts,
   and totals over all insns, to FILE.  */
static void
df_scan_start_dump (FILE *file ATTRIBUTE_UNUSED)
{
  int i;
  int dcount = 0;   /* Total defs.  */
  int ucount = 0;   /* Total uses.  */
  int ecount = 0;   /* Total uses from REG_EQUAL/EQUIV notes.  */
  int icount = 0;   /* Non-call insns.  */
  int ccount = 0;   /* Call insns.  */
  basic_block bb;
  rtx insn;

  fprintf (file, ";;  invalidated by call \t");
  df_print_regset (file, regs_invalidated_by_call_regset);
  fprintf (file, ";;  hardware regs used \t");
  df_print_regset (file, &df->hardware_regs_used);
  fprintf (file, ";;  regular block artificial uses \t");
  df_print_regset (file, &df->regular_block_artificial_uses);
  fprintf (file, ";;  eh block artificial uses \t");
  df_print_regset (file, &df->eh_block_artificial_uses);
  fprintf (file, ";;  entry block defs \t");
  df_print_regset (file, df->entry_block_defs);
  fprintf (file, ";;  exit block uses \t");
  df_print_regset (file, df->exit_block_uses);
  fprintf (file, ";;  regs ever live \t");
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (df_regs_ever_live_p (i))
      fprintf (file, " %d[%s]", i, reg_names[i]);
  fprintf (file, "\n;;  ref usage \t");

  /* Per-register ref counts; registers with no refs are omitted.  */
  for (i = 0; i < (int)df->regs_inited; i++)
    if (DF_REG_DEF_COUNT (i) || DF_REG_USE_COUNT (i) || DF_REG_EQ_USE_COUNT (i))
      {
        const char * sep = "";

        fprintf (file, "r%d={", i);
        if (DF_REG_DEF_COUNT (i))
          {
            fprintf (file, "%dd", DF_REG_DEF_COUNT (i));
            sep = ",";
            dcount += DF_REG_DEF_COUNT (i);
          }
        if (DF_REG_USE_COUNT (i))
          {
            fprintf (file, "%s%du", sep, DF_REG_USE_COUNT (i));
            sep = ",";
            ucount += DF_REG_USE_COUNT (i);
          }
        if (DF_REG_EQ_USE_COUNT (i))
          {
            fprintf (file, "%s%de", sep, DF_REG_EQ_USE_COUNT (i));
            ecount += DF_REG_EQ_USE_COUNT (i);
          }
        fprintf (file, "} ");
      }

  /* Count insns, separating calls from regular insns.  */
  FOR_EACH_BB (bb)
    FOR_BB_INSNS (bb, insn)
      if (INSN_P (insn))
        {
          if (CALL_P (insn))
            ccount++;
          else
            icount++;
        }

  fprintf (file, "\n;;    total ref usage %d{%dd,%du,%de}"
                 " in %d{%d regular + %d call} insns.\n",
                 dcount + ucount + ecount, dcount, ucount, ecount,
                 icount + ccount, icount, ccount);
}
470
 
471
/* Dump the bb_info for a given basic block.  Prints BB's artificial
   def and use chains to FILE; silently does nothing if the block has
   no scan info.  */
static void
df_scan_start_block (basic_block bb, FILE *file)
{
  struct df_scan_bb_info *bb_info
    = df_scan_get_bb_info (bb->index);

  if (bb_info)
    {
      fprintf (file, ";; bb %d artificial_defs: ", bb->index);
      df_refs_chain_dump (bb_info->artificial_defs, true, file);
      fprintf (file, "\n;; bb %d artificial_uses: ", bb->index);
      df_refs_chain_dump (bb_info->artificial_uses, true, file);
      fprintf (file, "\n");
    }
  /* Disabled: optionally dump per-insn debug info as well.  */
#if 0
  {
    rtx insn;
    FOR_BB_INSNS (bb, insn)
      if (INSN_P (insn))
        df_insn_debug (insn, false, file);
  }
#endif
}
495
 
496
/* The scanning problem's df_problem descriptor.  Most hooks are NULL
   because scanning has no iterative solution -- only allocation,
   per-block teardown and dump hooks are needed.  */
static struct df_problem problem_SCAN =
{
  DF_SCAN,                    /* Problem id.  */
  DF_NONE,                    /* Direction.  */
  df_scan_alloc,              /* Allocate the problem specific data.  */
  NULL,                       /* Reset global information.  */
  df_scan_free_bb_info,       /* Free basic block info.  */
  NULL,                       /* Local compute function.  */
  NULL,                       /* Init the solution specific data.  */
  NULL,                       /* Iterative solver.  */
  NULL,                       /* Confluence operator 0.  */
  NULL,                       /* Confluence operator n.  */
  NULL,                       /* Transfer function.  */
  NULL,                       /* Finalize function.  */
  df_scan_free,               /* Free all of the problem information.  */
  NULL,                       /* Remove this problem from the stack of dataflow problems.  */
  df_scan_start_dump,         /* Debugging.  */
  df_scan_start_block,        /* Debugging start block.  */
  NULL,                       /* Debugging end block.  */
  NULL,                       /* Incremental solution verify start.  */
  NULL,                       /* Incremental solution verify end.  */
  NULL,                       /* Dependent problem.  */
  sizeof (struct df_scan_bb_info),/* Size of entry of block_info array.  */
  TV_DF_SCAN,                 /* Timing variable.  */
  false                       /* Reset blocks on dropping out of blocks_to_analyze.  */
};
522
 
523
 
524
/* Create a new DATAFLOW instance and add it to an existing instance
   of DF.  The returned structure is what is used to get at the
   solution.  */

void
df_scan_add_problem (void)
{
  /* Register the scanning problem descriptor with the df core.  */
  df_add_problem (&problem_SCAN);
}
533
 
534
 
535
/*----------------------------------------------------------------------------
536
   Storage Allocation Utilities
537
----------------------------------------------------------------------------*/
538
 
539
 
540
/* First, grow the reg_info information.  If the current size is less than
   the number of pseudos, grow to 25% more than the number of
   pseudos.

   Second, assure that all of the slots up to max_reg_num have been
   filled with reg_info structures.  */

void
df_grow_reg_info (void)
{
  unsigned int max_reg = max_reg_num ();
  unsigned int new_size = max_reg;
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;
  unsigned int i;

  /* Part one: grow all seven reg-indexed arrays together so their
     sizes stay in sync.  */
  if (df->regs_size < new_size)
    {
      new_size += new_size / 4;   /* 25% headroom for future pseudos.  */
      df->def_regs = XRESIZEVEC (struct df_reg_info *, df->def_regs, new_size);
      df->use_regs = XRESIZEVEC (struct df_reg_info *, df->use_regs, new_size);
      df->eq_use_regs = XRESIZEVEC (struct df_reg_info *, df->eq_use_regs,
                                    new_size);
      df->def_info.begin = XRESIZEVEC (unsigned, df->def_info.begin, new_size);
      df->def_info.count = XRESIZEVEC (unsigned, df->def_info.count, new_size);
      df->use_info.begin = XRESIZEVEC (unsigned, df->use_info.begin, new_size);
      df->use_info.count = XRESIZEVEC (unsigned, df->use_info.count, new_size);
      df->regs_size = new_size;
    }

  /* Part two: pool-allocate and zero a df_reg_info for every slot not
     yet initialized (XRESIZEVEC does not zero new memory).  */
  for (i = df->regs_inited; i < max_reg; i++)
    {
      struct df_reg_info *reg_info;

      reg_info = (struct df_reg_info *) pool_alloc (problem_data->reg_pool);
      memset (reg_info, 0, sizeof (struct df_reg_info));
      df->def_regs[i] = reg_info;
      reg_info = (struct df_reg_info *) pool_alloc (problem_data->reg_pool);
      memset (reg_info, 0, sizeof (struct df_reg_info));
      df->use_regs[i] = reg_info;
      reg_info = (struct df_reg_info *) pool_alloc (problem_data->reg_pool);
      memset (reg_info, 0, sizeof (struct df_reg_info));
      df->eq_use_regs[i] = reg_info;
      df->def_info.begin[i] = 0;
      df->def_info.count[i] = 0;
      df->use_info.begin[i] = 0;
      df->use_info.count[i] = 0;
    }

  df->regs_inited = max_reg;
}
591
 
592
 
593
/* Grow the ref information.  Resizes REF_INFO's refs array to at
   least NEW_SIZE entries, zeroing the newly added tail; no-op if it
   is already large enough.  */

static void
df_grow_ref_info (struct df_ref_info *ref_info, unsigned int new_size)
{
  if (ref_info->refs_size < new_size)
    {
      ref_info->refs = XRESIZEVEC (df_ref, ref_info->refs, new_size);
      /* XRESIZEVEC leaves the new tail uninitialized; clear it.  */
      memset (ref_info->refs + ref_info->refs_size, 0,
              (new_size - ref_info->refs_size) *sizeof (df_ref));
      ref_info->refs_size = new_size;
    }
}
606
 
607
 
608
/* Check and grow the ref information if necessary.  This routine
609
   guarantees total_size + BITMAP_ADDEND amount of entries in refs
610
   array.  It updates ref_info->refs_size only and does not change
611
   ref_info->total_size.  */
612
 
613
static void
614
df_check_and_grow_ref_info (struct df_ref_info *ref_info,
615
                            unsigned bitmap_addend)
616
{
617
  if (ref_info->refs_size < ref_info->total_size + bitmap_addend)
618
    {
619
      int new_size = ref_info->total_size + bitmap_addend;
620
      new_size += ref_info->total_size / 4;
621
      df_grow_ref_info (ref_info, new_size);
622
    }
623
}
624
 
625
 
626
/* Grow the insn information.  If the current size is less than the
   number of instructions, grow to 25% more than the number of
   instructions.  */

void
df_grow_insn_info (void)
{
  unsigned int new_size = get_max_uid () + 1;
  if (DF_INSN_SIZE () < new_size)
    {
      new_size += new_size / 4;   /* 25% headroom.  */
      df->insns = XRESIZEVEC (struct df_insn_info *, df->insns, new_size);
      /* NOTE(review): mixes df->insns_size with DF_INSN_SIZE () --
         presumably the macro expands to the same field, so the memset
         covers exactly the uninitialized tail; confirm in df.h.  */
      memset (df->insns + df->insns_size, 0,
              (new_size - DF_INSN_SIZE ()) *sizeof (struct df_insn_info *));
      DF_INSN_SIZE () = new_size;
    }
}
643
 
644
 
645
 
646
 
647
/*----------------------------------------------------------------------------
648
   PUBLIC INTERFACES FOR SMALL GRAIN CHANGES TO SCANNING.
649
----------------------------------------------------------------------------*/
650
 
651
/* Rescan all of the blocks in the function.  (Despite the old wording,
   this routine unconditionally scans every block: the special
   entry/exit blocks first, then each regular block.)  */

void
df_scan_blocks (void)
{
  basic_block bb;

  /* Scanning invalidates any flat ref tables.  */
  df->def_info.ref_order = DF_REF_ORDER_NO_TABLE;
  df->use_info.ref_order = DF_REF_ORDER_NO_TABLE;

  df_get_regular_block_artificial_uses (&df->regular_block_artificial_uses);
  df_get_eh_block_artificial_uses (&df->eh_block_artificial_uses);

  /* EH landing pads also get the regular artificial uses.  */
  bitmap_ior_into (&df->eh_block_artificial_uses,
                   &df->regular_block_artificial_uses);

  /* ENTRY and EXIT blocks have special defs/uses.  */
  df_get_entry_block_def_set (df->entry_block_defs);
  df_record_entry_block_defs (df->entry_block_defs);
  df_get_exit_block_use_set (df->exit_block_uses);
  df_record_exit_block_uses (df->exit_block_uses);
  df_set_bb_dirty (BASIC_BLOCK (ENTRY_BLOCK));
  df_set_bb_dirty (BASIC_BLOCK (EXIT_BLOCK));

  /* Regular blocks */
  FOR_EACH_BB (bb)
    {
      unsigned int bb_index = bb->index;
      df_bb_refs_record (bb_index, true);
    }
}
683
 
684
/* Create new refs under address LOC within INSN.  This function is
   only used externally.  REF_FLAGS must be either 0 or DF_REF_IN_NOTE,
   depending on whether LOC is inside PATTERN (INSN) or a note.  */

void
df_uses_create (rtx *loc, rtx insn, int ref_flags)
{
  /* Only the IN_NOTE flag may be passed in; anything else would
     corrupt the ref's classification.  */
  gcc_assert (!(ref_flags & ~DF_REF_IN_NOTE));
  df_uses_record (NULL, loc, DF_REF_REG_USE,
                  BLOCK_FOR_INSN (insn),
                  DF_INSN_INFO_GET (insn),
                  ref_flags);
}
697
 
698
/* Create a new ref of type DF_REF_TYPE for register REG at address
699
   LOC within INSN of BB.  This function is only used externally.  */
700
 
701
df_ref
702
df_ref_create (rtx reg, rtx *loc, rtx insn,
703
               basic_block bb,
704
               enum df_ref_type ref_type,
705
               int ref_flags)
706
{
707
  enum df_ref_class cl;
708
 
709
  df_grow_reg_info ();
710
 
711
  /* You cannot hack artificial refs.  */
712
  gcc_assert (insn);
713
 
714
  if (loc)
715
    cl = DF_REF_REGULAR;
716
  else
717
    cl = DF_REF_BASE;
718
 
719
  return df_ref_create_structure (cl, NULL, reg, loc, bb,
720
                                  DF_INSN_INFO_GET (insn),
721
                                  ref_type, ref_flags);
722
}
723
 
724
/* Incrementally install the single ref REF: pick the reg-info,
   ref-table and per-insn chain that match its kind (def, note use, or
   plain use), install it, and splice it into the insn's
   NULL-terminated ref array, keeping that array sorted.  Marks the
   containing block dirty so later verification notices the change.  */
static void
df_install_ref_incremental (df_ref ref)
{
  struct df_reg_info **reg_info;
  struct df_ref_info *ref_info;
  df_ref *ref_rec;
  df_ref **ref_rec_ptr;
  unsigned int count = 0;
  bool add_to_table;

  rtx insn = DF_REF_INSN (ref);
  basic_block bb = BLOCK_FOR_INSN (insn);

  /* Select the def, eq-use or use bookkeeping, depending on the ref's
     kind.  */
  if (DF_REF_REG_DEF_P (ref))
    {
      reg_info = df->def_regs;
      ref_info = &df->def_info;
      ref_rec_ptr = &DF_INSN_DEFS (insn);
      add_to_table = ref_info->ref_order != DF_REF_ORDER_NO_TABLE;
    }
  else if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
    {
      reg_info = df->eq_use_regs;
      ref_info = &df->use_info;
      ref_rec_ptr = &DF_INSN_EQ_USES (insn);
      /* Note uses only go into the flat table when the table's order
         mode includes notes.  */
      switch (ref_info->ref_order)
        {
        case DF_REF_ORDER_UNORDERED_WITH_NOTES:
        case DF_REF_ORDER_BY_REG_WITH_NOTES:
        case DF_REF_ORDER_BY_INSN_WITH_NOTES:
          add_to_table = true;
          break;
        default:
          add_to_table = false;
          break;
        }
    }
  else
    {
      reg_info = df->use_regs;
      ref_info = &df->use_info;
      ref_rec_ptr = &DF_INSN_USES (insn);
      add_to_table = ref_info->ref_order != DF_REF_ORDER_NO_TABLE;
    }

  /* Do not add if ref is not in the right blocks.  */
  if (add_to_table && df->analyze_subset)
    add_to_table = bitmap_bit_p (df->blocks_to_analyze, bb->index);

  df_install_ref (ref, reg_info[DF_REF_REGNO (ref)], ref_info, add_to_table);

  /* Appending out of order demotes the table's ordering guarantee to
     "unordered" (with or without notes).  */
  if (add_to_table)
    switch (ref_info->ref_order)
      {
      case DF_REF_ORDER_UNORDERED_WITH_NOTES:
      case DF_REF_ORDER_BY_REG_WITH_NOTES:
      case DF_REF_ORDER_BY_INSN_WITH_NOTES:
        ref_info->ref_order = DF_REF_ORDER_UNORDERED_WITH_NOTES;
        break;
      default:
        ref_info->ref_order = DF_REF_ORDER_UNORDERED;
        break;
      }

  /* Count the existing NULL-terminated chain.  */
  ref_rec = *ref_rec_ptr;
  while (*ref_rec)
    {
      count++;
      ref_rec++;
    }

  ref_rec = *ref_rec_ptr;
  if (count)
    {
      /* Grow the chain by one (plus NULL terminator), append REF and
         re-sort to keep the canonical order.  */
      ref_rec = XRESIZEVEC (df_ref, ref_rec, count+2);
      *ref_rec_ptr = ref_rec;
      ref_rec[count] = ref;
      ref_rec[count+1] = NULL;
      qsort (ref_rec, count + 1, sizeof (df_ref), df_ref_compare);
    }
  else
    {
      /* Empty chain: allocate a fresh two-slot array.  NOTE(review):
         this inner ref_rec intentionally shadows the outer one.  */
      df_ref *ref_rec = XNEWVEC (df_ref, 2);
      ref_rec[0] = ref;
      ref_rec[1] = NULL;
      *ref_rec_ptr = ref_rec;
    }

  /* Disabled debug dump of the added ref.  */
#if 0
  if (dump_file)
    {
      fprintf (dump_file, "adding ref ");
      df_ref_debug (ref, dump_file);
    }
#endif
  /* By adding the ref directly, df_insn_rescan may not find any
     differences even though the block will have changed.  So we need
     to mark the block dirty ourselves.  */
  if (!DEBUG_INSN_P (DF_REF_INSN (ref)))
    df_set_bb_dirty (bb);
}
825
 
826
 
827
 
828
/*----------------------------------------------------------------------------
829
   UTILITIES TO CREATE AND DESTROY REFS AND CHAINS.
830
----------------------------------------------------------------------------*/
831
 
832
static void
833
df_free_ref (df_ref ref)
834
{
835
  struct df_scan_problem_data *problem_data
836
    = (struct df_scan_problem_data *) df_scan->problem_data;
837
 
838
  switch (DF_REF_CLASS (ref))
839
    {
840
    case DF_REF_BASE:
841
      pool_free (problem_data->ref_base_pool, ref);
842
      break;
843
 
844
    case DF_REF_ARTIFICIAL:
845
      pool_free (problem_data->ref_artificial_pool, ref);
846
      break;
847
 
848
    case DF_REF_REGULAR:
849
      pool_free (problem_data->ref_regular_pool, ref);
850
      break;
851
    }
852
}
853
 
854
 
855
/* Unlink and delete REF at the reg_use, reg_eq_use or reg_def chain.
856
   Also delete the def-use or use-def chain if it exists.  */
857
 
858
static void
df_reg_chain_unlink (df_ref ref)
{
  df_ref next = DF_REF_NEXT_REG (ref);
  df_ref prev = DF_REF_PREV_REG (ref);
  int id = DF_REF_ID (ref);
  struct df_reg_info *reg_info;
  df_ref *refs = NULL;

  /* Select the per-register info for REF and, when the refs are also
     kept in a flat table, the table that may hold REF by its id.  */
  if (DF_REF_REG_DEF_P (ref))
    {
      int regno = DF_REF_REGNO (ref);
      reg_info = DF_REG_DEF_GET (regno);
      refs = df->def_info.refs;
    }
  else
    {
      if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
        {
          reg_info = DF_REG_EQ_USE_GET (DF_REF_REGNO (ref));
          /* Note (eq_use) refs only live in the use table for the
             "with notes" orderings.  */
          switch (df->use_info.ref_order)
            {
            case DF_REF_ORDER_UNORDERED_WITH_NOTES:
            case DF_REF_ORDER_BY_REG_WITH_NOTES:
            case DF_REF_ORDER_BY_INSN_WITH_NOTES:
              refs = df->use_info.refs;
              break;
            default:
              break;
            }
        }
      else
        {
          reg_info = DF_REG_USE_GET (DF_REF_REGNO (ref));
          refs = df->use_info.refs;
        }
    }

  /* Clear REF's slot in the table.  Under analyze_subset, only refs
     in analyzed blocks were given table slots, so only those may be
     cleared.  */
  if (refs)
    {
      if (df->analyze_subset)
        {
          if (bitmap_bit_p (df->blocks_to_analyze, DF_REF_BBNO (ref)))
            refs[id] = NULL;
        }
      else
        refs[id] = NULL;
    }

  /* Delete any def-use or use-def chains that start here. It is
     possible that there is trash in this field.  This happens for
     insns that have been deleted when rescanning has been deferred
     and the chain problem has also been deleted.  The chain tear down
     code skips deleted insns.  */
  if (df_chain && DF_REF_CHAIN (ref))
    df_chain_unlink (ref);

  reg_info->n_refs--;
  if (DF_REF_FLAGS_IS_SET (ref, DF_HARD_REG_LIVE))
    {
      /* DF_HARD_REG_LIVE is only ever set on hard-register refs.  */
      gcc_assert (DF_REF_REGNO (ref) < FIRST_PSEUDO_REGISTER);
      df->hard_regs_live_count[DF_REF_REGNO (ref)]--;
    }

  /* Unlink from the reg chain.  If there is no prev, this is the
     first of the list.  If not, just join the next and prev.  */
  if (prev)
    DF_REF_NEXT_REG (prev) = next;
  else
    {
      gcc_assert (reg_info->reg_chain == ref);
      reg_info->reg_chain = next;
    }
  if (next)
    DF_REF_PREV_REG (next) = prev;

  /* Finally give the ref structure back to its pool.  */
  df_free_ref (ref);
}
936
 
937
 
938
/* Remove REF from VEC.  */
939
 
940
static void
941
df_ref_compress_rec (df_ref **vec_ptr, df_ref ref)
942
{
943
  df_ref *vec = *vec_ptr;
944
 
945
  if (vec[1])
946
    {
947
      while (*vec && *vec != ref)
948
        vec++;
949
 
950
      while (*vec)
951
        {
952
          *vec = *(vec+1);
953
          vec++;
954
        }
955
    }
956
  else
957
    {
958
      free (vec);
959
      *vec_ptr = df_null_ref_rec;
960
    }
961
}
962
 
963
 
964
/* Unlink REF from all def-use/use-def chains, etc.  */
965
 
966
void
df_ref_remove (df_ref ref)
{
#if 0
  if (dump_file)
    {
      fprintf (dump_file, "removing ref ");
      df_ref_debug (ref, dump_file);
    }
#endif

  /* Remove REF from whichever ref vector holds it: the basic block's
     artificial defs/uses for artificial refs, otherwise the owning
     insn's defs, uses or eq_uses vector.  */
  if (DF_REF_REG_DEF_P (ref))
    {
      if (DF_REF_IS_ARTIFICIAL (ref))
        {
          struct df_scan_bb_info *bb_info
            = df_scan_get_bb_info (DF_REF_BBNO (ref));
          df_ref_compress_rec (&bb_info->artificial_defs, ref);
        }
      else
        {
          unsigned int uid = DF_REF_INSN_UID (ref);
          struct df_insn_info *insn_rec = DF_INSN_UID_GET (uid);
          df_ref_compress_rec (&insn_rec->defs, ref);
        }
    }
  else
    {
      if (DF_REF_IS_ARTIFICIAL (ref))
        {
          struct df_scan_bb_info *bb_info
            = df_scan_get_bb_info (DF_REF_BBNO (ref));
          df_ref_compress_rec (&bb_info->artificial_uses, ref);
        }
      else
        {
          unsigned int uid = DF_REF_INSN_UID (ref);
          struct df_insn_info *insn_rec = DF_INSN_UID_GET (uid);

          /* Uses recorded from REG_EQUAL/REG_EQUIV notes live in a
             separate vector from the insn-body uses.  */
          if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
            df_ref_compress_rec (&insn_rec->eq_uses, ref);
          else
            df_ref_compress_rec (&insn_rec->uses, ref);
        }
    }

  /* By deleting the ref directly, df_insn_rescan may not find any
     differences even though the block will have changed.  So we need
     to mark the block dirty ourselves.  */
  if (!DEBUG_INSN_P (DF_REF_INSN (ref)))
    df_set_bb_dirty (DF_REF_BB (ref));
  df_reg_chain_unlink (ref);
}
1019
 
1020
 
1021
/* Create the insn record for INSN.  If there was one there, zero it
1022
   out.  */
1023
 
1024
struct df_insn_info *
1025
df_insn_create_insn_record (rtx insn)
1026
{
1027
  struct df_scan_problem_data *problem_data
1028
    = (struct df_scan_problem_data *) df_scan->problem_data;
1029
  struct df_insn_info *insn_rec;
1030
 
1031
  df_grow_insn_info ();
1032
  insn_rec = DF_INSN_INFO_GET (insn);
1033
  if (!insn_rec)
1034
    {
1035
      insn_rec = (struct df_insn_info *) pool_alloc (problem_data->insn_pool);
1036
      DF_INSN_INFO_SET (insn, insn_rec);
1037
    }
1038
  memset (insn_rec, 0, sizeof (struct df_insn_info));
1039
  insn_rec->insn = insn;
1040
  return insn_rec;
1041
}
1042
 
1043
 
1044
/* Delete all du chain (DF_REF_CHAIN()) of all refs in the ref chain.  */
1045
 
1046
static void
1047
df_ref_chain_delete_du_chain (df_ref *ref_rec)
1048
{
1049
  while (*ref_rec)
1050
    {
1051
      df_ref ref = *ref_rec;
1052
      /* CHAIN is allocated by DF_CHAIN. So make sure to
1053
         pass df_scan instance for the problem.  */
1054
      if (DF_REF_CHAIN (ref))
1055
        df_chain_unlink (ref);
1056
      ref_rec++;
1057
    }
1058
}
1059
 
1060
 
1061
/* Delete all refs in the ref chain.  */
1062
 
1063
static void
df_ref_chain_delete (df_ref *ref_rec)
{
  df_ref *start = ref_rec;
  /* Unlink (and free) every ref in the vector.  The vector slots are
     left untouched; only the refs they point to are released.  */
  while (*ref_rec)
    {
      df_reg_chain_unlink (*ref_rec);
      ref_rec++;
    }

  /* If the list is empty, it has a special shared element that is not
     to be deleted.  (Reading *start here only distinguishes the shared
     empty vector from an allocated one; the pointer values it holds
     are stale after the unlinks above and are not dereferenced.)  */
  if (*start)
    free (start);
}
1078
 
1079
 
1080
/* Delete the hardreg chain.  */
1081
 
1082
static void
1083
df_mw_hardreg_chain_delete (struct df_mw_hardreg **hardregs)
1084
{
1085
  struct df_scan_problem_data *problem_data;
1086
 
1087
  if (!hardregs)
1088
    return;
1089
 
1090
  problem_data = (struct df_scan_problem_data *) df_scan->problem_data;
1091
 
1092
  while (*hardregs)
1093
    {
1094
      pool_free (problem_data->mw_reg_pool, *hardregs);
1095
      hardregs++;
1096
    }
1097
}
1098
 
1099
 
1100
/* Delete all of the refs information from INSN.  BB must be passed in
1101
   except when called from df_process_deferred_rescans to mark the block
1102
   as dirty.  */
1103
 
1104
void
df_insn_delete (basic_block bb, unsigned int uid)
{
  struct df_insn_info *insn_info = NULL;
  /* Nothing to do when dataflow is not initialized.  */
  if (!df)
    return;

  df_grow_bb_info (df_scan);
  df_grow_reg_info ();

  /* The block must be marked as dirty now, rather than later as in
     df_insn_rescan and df_notes_rescan because it may not be there at
     rescanning time and the mark would blow up.  */
  if (bb)
    df_set_bb_dirty (bb);

  insn_info = DF_INSN_UID_SAFE_GET (uid);

  /* The client has deferred rescanning.  */
  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
    {
      /* Record the pending deletion and cancel any pending rescans
         for the same uid; the real work happens later in
         df_process_deferred_rescans.  */
      if (insn_info)
        {
          bitmap_clear_bit (&df->insns_to_rescan, uid);
          bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
          bitmap_set_bit (&df->insns_to_delete, uid);
        }
      if (dump_file)
        fprintf (dump_file, "deferring deletion of insn with uid = %d.\n", uid);
      return;
    }

  if (dump_file)
    fprintf (dump_file, "deleting insn with uid = %d.\n", uid);

  /* Immediate deletion: drop any deferred work queued for this uid.  */
  bitmap_clear_bit (&df->insns_to_delete, uid);
  bitmap_clear_bit (&df->insns_to_rescan, uid);
  bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
  if (insn_info)
    {
      struct df_scan_problem_data *problem_data
        = (struct df_scan_problem_data *) df_scan->problem_data;

      /* In general, notes do not have the insn_info fields
         initialized.  However, combine deletes insns by changing them
         to notes.  How clever.  So we cannot just check if it is a
         valid insn before short circuiting this code, we need to see
         if we actually initialized it.  */
      if (insn_info->defs)
        {
          df_mw_hardreg_chain_delete (insn_info->mw_hardregs);

          /* The du/ud chains must be torn down before the refs that
             anchor them are freed below.  */
          if (df_chain)
            {
              df_ref_chain_delete_du_chain (insn_info->defs);
              df_ref_chain_delete_du_chain (insn_info->uses);
              df_ref_chain_delete_du_chain (insn_info->eq_uses);
            }

          df_ref_chain_delete (insn_info->defs);
          df_ref_chain_delete (insn_info->uses);
          df_ref_chain_delete (insn_info->eq_uses);
        }
      pool_free (problem_data->insn_pool, insn_info);
      DF_INSN_UID_SET (uid, NULL);
    }
}
1171
 
1172
 
1173
/* Free all of the refs and the mw_hardregs in COLLECTION_REC.  */
1174
 
1175
static void
df_free_collection_rec (struct df_collection_rec *collection_rec)
{
  unsigned int ix;
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;
  df_ref ref;
  struct df_mw_hardreg *mw;

  /* Release every collected ref back to its class pool...  */
  FOR_EACH_VEC_ELT (df_ref, collection_rec->def_vec, ix, ref)
    df_free_ref (ref);
  FOR_EACH_VEC_ELT (df_ref, collection_rec->use_vec, ix, ref)
    df_free_ref (ref);
  FOR_EACH_VEC_ELT (df_ref, collection_rec->eq_use_vec, ix, ref)
    df_free_ref (ref);
  /* ...and the multiword hardreg records to theirs.  */
  FOR_EACH_VEC_ELT (df_mw_hardreg_ptr, collection_rec->mw_vec, ix, mw)
    pool_free (problem_data->mw_reg_pool, mw);

  /* Then free the vectors themselves.  */
  VEC_free (df_ref, stack, collection_rec->def_vec);
  VEC_free (df_ref, stack, collection_rec->use_vec);
  VEC_free (df_ref, stack, collection_rec->eq_use_vec);
  VEC_free (df_mw_hardreg_ptr, stack, collection_rec->mw_vec);
}
1198
 
1199
/* Rescan INSN.  Return TRUE if the rescanning produced any changes.  */
1200
 
1201
bool
1202
df_insn_rescan (rtx insn)
1203
{
1204
  unsigned int uid = INSN_UID (insn);
1205
  struct df_insn_info *insn_info = NULL;
1206
  basic_block bb = BLOCK_FOR_INSN (insn);
1207
  struct df_collection_rec collection_rec;
1208
 
1209
  if ((!df) || (!INSN_P (insn)))
1210
    return false;
1211
 
1212
  if (!bb)
1213
    {
1214
      if (dump_file)
1215
        fprintf (dump_file, "no bb for insn with uid = %d.\n", uid);
1216
      return false;
1217
    }
1218
 
1219
  /* The client has disabled rescanning and plans to do it itself.  */
1220
  if (df->changeable_flags & DF_NO_INSN_RESCAN)
1221
    return false;
1222
 
1223
  df_grow_bb_info (df_scan);
1224
  df_grow_reg_info ();
1225
 
1226
  insn_info = DF_INSN_UID_SAFE_GET (uid);
1227
 
1228
  /* The client has deferred rescanning.  */
1229
  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
1230
    {
1231
      if (!insn_info)
1232
        {
1233
          insn_info = df_insn_create_insn_record (insn);
1234
          insn_info->defs = df_null_ref_rec;
1235
          insn_info->uses = df_null_ref_rec;
1236
          insn_info->eq_uses = df_null_ref_rec;
1237
          insn_info->mw_hardregs = df_null_mw_rec;
1238
        }
1239
      if (dump_file)
1240
        fprintf (dump_file, "deferring rescan insn with uid = %d.\n", uid);
1241
 
1242
      bitmap_clear_bit (&df->insns_to_delete, uid);
1243
      bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
1244
      bitmap_set_bit (&df->insns_to_rescan, INSN_UID (insn));
1245
      return false;
1246
    }
1247
 
1248
  collection_rec.def_vec = VEC_alloc (df_ref, stack, 128);
1249
  collection_rec.use_vec = VEC_alloc (df_ref, stack, 32);
1250
  collection_rec.eq_use_vec = VEC_alloc (df_ref, stack, 32);
1251
  collection_rec.mw_vec = VEC_alloc (df_mw_hardreg_ptr, stack, 32);
1252
 
1253
  bitmap_clear_bit (&df->insns_to_delete, uid);
1254
  bitmap_clear_bit (&df->insns_to_rescan, uid);
1255
  bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
1256
  if (insn_info)
1257
    {
1258
      int luid;
1259
      bool the_same = df_insn_refs_verify (&collection_rec, bb, insn, false);
1260
      /* If there's no change, return false. */
1261
      if (the_same)
1262
        {
1263
          df_free_collection_rec (&collection_rec);
1264
          if (dump_file)
1265
            fprintf (dump_file, "verify found no changes in insn with uid = %d.\n", uid);
1266
          return false;
1267
        }
1268
      if (dump_file)
1269
        fprintf (dump_file, "rescanning insn with uid = %d.\n", uid);
1270
 
1271
      /* There's change - we need to delete the existing info.
1272
         Since the insn isn't moved, we can salvage its LUID.  */
1273
      luid = DF_INSN_LUID (insn);
1274
      df_insn_delete (NULL, uid);
1275
      df_insn_create_insn_record (insn);
1276
      DF_INSN_LUID (insn) = luid;
1277
    }
1278
  else
1279
    {
1280
      struct df_insn_info *insn_info = df_insn_create_insn_record (insn);
1281
      df_insn_refs_collect (&collection_rec, bb, insn_info);
1282
      if (dump_file)
1283
        fprintf (dump_file, "scanning new insn with uid = %d.\n", uid);
1284
    }
1285
 
1286
  df_refs_add_to_chains (&collection_rec, bb, insn);
1287
  if (!DEBUG_INSN_P (insn))
1288
    df_set_bb_dirty (bb);
1289
 
1290
  VEC_free (df_ref, stack, collection_rec.def_vec);
1291
  VEC_free (df_ref, stack, collection_rec.use_vec);
1292
  VEC_free (df_ref, stack, collection_rec.eq_use_vec);
1293
  VEC_free (df_mw_hardreg_ptr, stack, collection_rec.mw_vec);
1294
 
1295
  return true;
1296
}
1297
 
1298
/* Same as df_insn_rescan, but don't mark the basic block as
1299
   dirty.  */
1300
 
1301
bool
df_insn_rescan_debug_internal (rtx insn)
{
  unsigned int uid = INSN_UID (insn);
  struct df_insn_info *insn_info;

  /* Only for debug insns whose location has been reset to unknown.  */
  gcc_assert (DEBUG_INSN_P (insn)
              && VAR_LOC_UNKNOWN_P (INSN_VAR_LOCATION_LOC (insn)));

  if (!df)
    return false;

  insn_info = DF_INSN_UID_SAFE_GET (INSN_UID (insn));
  if (!insn_info)
    return false;

  if (dump_file)
    fprintf (dump_file, "deleting debug_insn with uid = %d.\n", uid);

  /* Cancel any deferred work queued for this insn.  */
  bitmap_clear_bit (&df->insns_to_delete, uid);
  bitmap_clear_bit (&df->insns_to_rescan, uid);
  bitmap_clear_bit (&df->insns_to_notes_rescan, uid);

  /* Record was never initialized with refs.  */
  if (!insn_info->defs)
    return false;

  /* Record holds only the shared empty vectors: nothing changes.  */
  if (insn_info->defs == df_null_ref_rec
      && insn_info->uses == df_null_ref_rec
      && insn_info->eq_uses == df_null_ref_rec
      && insn_info->mw_hardregs == df_null_mw_rec)
    return false;

  /* Tear down all refs the insn had; unlike df_insn_rescan, the
     containing block is deliberately NOT marked dirty.  */
  df_mw_hardreg_chain_delete (insn_info->mw_hardregs);

  if (df_chain)
    {
      df_ref_chain_delete_du_chain (insn_info->defs);
      df_ref_chain_delete_du_chain (insn_info->uses);
      df_ref_chain_delete_du_chain (insn_info->eq_uses);
    }

  df_ref_chain_delete (insn_info->defs);
  df_ref_chain_delete (insn_info->uses);
  df_ref_chain_delete (insn_info->eq_uses);

  /* Leave the record pointing at the shared empty vectors.  */
  insn_info->defs = df_null_ref_rec;
  insn_info->uses = df_null_ref_rec;
  insn_info->eq_uses = df_null_ref_rec;
  insn_info->mw_hardregs = df_null_mw_rec;

  return true;
}
1353
 
1354
 
1355
/* Rescan all of the insns in the function.  Note that the artificial
1356
   uses and defs are not touched.  This function will destroy def-use
1357
   or use-def chains.  */
1358
 
1359
void
df_insn_rescan_all (void)
{
  bool no_insn_rescan = false;
  bool defer_insn_rescan = false;
  basic_block bb;
  bitmap_iterator bi;
  unsigned int uid;
  bitmap_head tmp;

  bitmap_initialize (&tmp, &df_bitmap_obstack);

  /* Temporarily drop the NO/DEFER rescan flags so the rescans below
     actually happen; they are restored at the end.  */
  if (df->changeable_flags & DF_NO_INSN_RESCAN)
    {
      df_clear_flags (DF_NO_INSN_RESCAN);
      no_insn_rescan = true;
    }

  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
    {
      df_clear_flags (DF_DEFER_INSN_RESCAN);
      defer_insn_rescan = true;
    }

  /* Flush deferred deletions first.  Iterate over a copy because
     df_insn_delete modifies df->insns_to_delete.  */
  bitmap_copy (&tmp, &df->insns_to_delete);
  EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, uid, bi)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
      if (insn_info)
        df_insn_delete (NULL, uid);
    }

  bitmap_clear (&tmp);
  bitmap_clear (&df->insns_to_delete);
  bitmap_clear (&df->insns_to_rescan);
  bitmap_clear (&df->insns_to_notes_rescan);

  /* Rescan every insn of every block.  */
  FOR_EACH_BB (bb)
    {
      rtx insn;
      FOR_BB_INSNS (bb, insn)
        {
          df_insn_rescan (insn);
        }
    }

  /* Restore the caller's rescan policy.  */
  if (no_insn_rescan)
    df_set_flags (DF_NO_INSN_RESCAN);
  if (defer_insn_rescan)
    df_set_flags (DF_DEFER_INSN_RESCAN);
}
1410
 
1411
 
1412
/* Process all of the deferred rescans or deletions.  */
1413
 
1414
void
df_process_deferred_rescans (void)
{
  bool no_insn_rescan = false;
  bool defer_insn_rescan = false;
  bitmap_iterator bi;
  unsigned int uid;
  bitmap_head tmp;

  bitmap_initialize (&tmp, &df_bitmap_obstack);

  /* Temporarily drop the NO/DEFER rescan flags so the queued work can
     be performed immediately; they are restored at the end.  */
  if (df->changeable_flags & DF_NO_INSN_RESCAN)
    {
      df_clear_flags (DF_NO_INSN_RESCAN);
      no_insn_rescan = true;
    }

  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
    {
      df_clear_flags (DF_DEFER_INSN_RESCAN);
      defer_insn_rescan = true;
    }

  if (dump_file)
    fprintf (dump_file, "starting the processing of deferred insns\n");

  /* Deletions first.  Each queue is iterated over a copy because the
     df_insn_* calls mutate the live bitmaps.  */
  bitmap_copy (&tmp, &df->insns_to_delete);
  EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, uid, bi)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
      if (insn_info)
        df_insn_delete (NULL, uid);
    }

  /* Then full rescans...  */
  bitmap_copy (&tmp, &df->insns_to_rescan);
  EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, uid, bi)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
      if (insn_info)
        df_insn_rescan (insn_info->insn);
    }

  /* ...and finally note-only rescans.  */
  bitmap_copy (&tmp, &df->insns_to_notes_rescan);
  EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, uid, bi)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
      if (insn_info)
        df_notes_rescan (insn_info->insn);
    }

  if (dump_file)
    fprintf (dump_file, "ending the processing of deferred insns\n");

  bitmap_clear (&tmp);
  bitmap_clear (&df->insns_to_delete);
  bitmap_clear (&df->insns_to_rescan);
  bitmap_clear (&df->insns_to_notes_rescan);

  /* Restore the caller's rescan policy.  */
  if (no_insn_rescan)
    df_set_flags (DF_NO_INSN_RESCAN);
  if (defer_insn_rescan)
    df_set_flags (DF_DEFER_INSN_RESCAN);

  /* If someone changed regs_ever_live during this pass, fix up the
     entry and exit blocks.  */
  if (df->redo_entry_and_exit)
    {
      df_update_entry_exit_and_calls ();
      df->redo_entry_and_exit = false;
    }
}
1485
 
1486
 
1487
/* Count the number of refs. Include the defs if INCLUDE_DEFS. Include
1488
   the uses if INCLUDE_USES. Include the eq_uses if
1489
   INCLUDE_EQ_USES.  */
1490
 
1491
static unsigned int
1492
df_count_refs (bool include_defs, bool include_uses,
1493
               bool include_eq_uses)
1494
{
1495
  unsigned int regno;
1496
  int size = 0;
1497
  unsigned int m = df->regs_inited;
1498
 
1499
  for (regno = 0; regno < m; regno++)
1500
    {
1501
      if (include_defs)
1502
        size += DF_REG_DEF_COUNT (regno);
1503
      if (include_uses)
1504
        size += DF_REG_USE_COUNT (regno);
1505
      if (include_eq_uses)
1506
        size += DF_REG_EQ_USE_COUNT (regno);
1507
    }
1508
  return size;
1509
}
1510
 
1511
 
1512
/* Take build ref table for either the uses or defs from the reg-use
1513
   or reg-def chains.  This version processes the refs in reg order
1514
   which is likely to be best if processing the whole function.  */
1515
 
1516
static void
df_reorganize_refs_by_reg_by_reg (struct df_ref_info *ref_info,
                                  bool include_defs,
                                  bool include_uses,
                                  bool include_eq_uses)
{
  unsigned int m = df->regs_inited;
  unsigned int regno;
  unsigned int offset = 0;
  unsigned int start;

  /* When hard registers are excluded, zero their begin/count slots and
     start the walk at the first pseudo.  */
  if (df->changeable_flags & DF_NO_HARD_REGS)
    {
      start = FIRST_PSEUDO_REGISTER;
      memset (ref_info->begin, 0, sizeof (int) * FIRST_PSEUDO_REGISTER);
      memset (ref_info->count, 0, sizeof (int) * FIRST_PSEUDO_REGISTER);
    }
  else
    start = 0;

  ref_info->total_size
    = df_count_refs (include_defs, include_uses, include_eq_uses);

  df_check_and_grow_ref_info (ref_info, 1);

  /* Walk each register's chains in order, appending the refs to the
     flat table and renumbering their ids to their table slots.  */
  for (regno = start; regno < m; regno++)
    {
      int count = 0;
      ref_info->begin[regno] = offset;
      if (include_defs)
        {
          df_ref ref = DF_REG_DEF_CHAIN (regno);
          while (ref)
            {
              ref_info->refs[offset] = ref;
              DF_REF_ID (ref) = offset++;
              count++;
              ref = DF_REF_NEXT_REG (ref);
              gcc_checking_assert (offset < ref_info->refs_size);
            }
        }
      if (include_uses)
        {
          df_ref ref = DF_REG_USE_CHAIN (regno);
          while (ref)
            {
              ref_info->refs[offset] = ref;
              DF_REF_ID (ref) = offset++;
              count++;
              ref = DF_REF_NEXT_REG (ref);
              gcc_checking_assert (offset < ref_info->refs_size);
            }
        }
      if (include_eq_uses)
        {
          df_ref ref = DF_REG_EQ_USE_CHAIN (regno);
          while (ref)
            {
              ref_info->refs[offset] = ref;
              DF_REF_ID (ref) = offset++;
              count++;
              ref = DF_REF_NEXT_REG (ref);
              gcc_checking_assert (offset < ref_info->refs_size);
            }
        }
      ref_info->count[regno] = count;
    }

  /* The bitmap size is not decremented when refs are deleted.  So
     reset it now that we have squished out all of the empty
     slots.  */
  ref_info->table_size = offset;
}
1589
 
1590
 
1591
/* Take build ref table for either the uses or defs from the reg-use
1592
   or reg-def chains.  This version processes the refs in insn order
1593
   which is likely to be best if processing some segment of the
1594
   function.  */
1595
 
1596
static void
df_reorganize_refs_by_reg_by_insn (struct df_ref_info *ref_info,
                                   bool include_defs,
                                   bool include_uses,
                                   bool include_eq_uses)
{
  bitmap_iterator bi;
  unsigned int bb_index;
  unsigned int m = df->regs_inited;
  unsigned int offset = 0;
  unsigned int r;
  /* First register to place in the table; hard registers are skipped
     entirely under DF_NO_HARD_REGS.  */
  unsigned int start
    = (df->changeable_flags & DF_NO_HARD_REGS) ? FIRST_PSEUDO_REGISTER : 0;

  memset (ref_info->begin, 0, sizeof (int) * df->regs_inited);
  memset (ref_info->count, 0, sizeof (int) * df->regs_inited);

  ref_info->total_size = df_count_refs (include_defs, include_uses, include_eq_uses);
  df_check_and_grow_ref_info (ref_info, 1);

  /* Pass 1: count the refs per register over the analyzed blocks
     (artificial refs of each block first, then the insns).  */
  EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, bb_index, bi)
    {
      basic_block bb = BASIC_BLOCK (bb_index);
      rtx insn;
      df_ref *ref_rec;

      if (include_defs)
        for (ref_rec = df_get_artificial_defs (bb_index); *ref_rec; ref_rec++)
          {
            unsigned int regno = DF_REF_REGNO (*ref_rec);
            ref_info->count[regno]++;
          }
      if (include_uses)
        for (ref_rec = df_get_artificial_uses (bb_index); *ref_rec; ref_rec++)
          {
            unsigned int regno = DF_REF_REGNO (*ref_rec);
            ref_info->count[regno]++;
          }

      FOR_BB_INSNS (bb, insn)
        {
          if (INSN_P (insn))
            {
              unsigned int uid = INSN_UID (insn);

              if (include_defs)
                for (ref_rec = DF_INSN_UID_DEFS (uid); *ref_rec; ref_rec++)
                  {
                    unsigned int regno = DF_REF_REGNO (*ref_rec);
                    ref_info->count[regno]++;
                  }
              if (include_uses)
                for (ref_rec = DF_INSN_UID_USES (uid); *ref_rec; ref_rec++)
                  {
                    unsigned int regno = DF_REF_REGNO (*ref_rec);
                    ref_info->count[regno]++;
                  }
              if (include_eq_uses)
                for (ref_rec = DF_INSN_UID_EQ_USES (uid); *ref_rec; ref_rec++)
                  {
                    unsigned int regno = DF_REF_REGNO (*ref_rec);
                    ref_info->count[regno]++;
                  }
            }
        }
    }

  /* Turn the counts into begin offsets (a prefix sum) and zero the
     counts so they can be reused as fill cursors in pass 2.  */
  for (r = start; r < m; r++)
    {
      ref_info->begin[r] = offset;
      offset += ref_info->count[r];
      ref_info->count[r] = 0;
    }

  /* Pass 2: walk the same blocks again and drop each ref into
     begin[regno] + count[regno], renumbering its id to its slot.  */
  EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, bb_index, bi)
    {
      basic_block bb = BASIC_BLOCK (bb_index);
      rtx insn;
      df_ref *ref_rec;

      if (include_defs)
        for (ref_rec = df_get_artificial_defs (bb_index); *ref_rec; ref_rec++)
          {
            df_ref ref = *ref_rec;
            unsigned int regno = DF_REF_REGNO (ref);
            if (regno >= start)
              {
                unsigned int id
                  = ref_info->begin[regno] + ref_info->count[regno]++;
                DF_REF_ID (ref) = id;
                ref_info->refs[id] = ref;
              }
          }
      if (include_uses)
        for (ref_rec = df_get_artificial_uses (bb_index); *ref_rec; ref_rec++)
          {
            df_ref ref = *ref_rec;
            unsigned int regno = DF_REF_REGNO (ref);
            if (regno >= start)
              {
                unsigned int id
                  = ref_info->begin[regno] + ref_info->count[regno]++;
                DF_REF_ID (ref) = id;
                ref_info->refs[id] = ref;
              }
          }

      FOR_BB_INSNS (bb, insn)
        {
          if (INSN_P (insn))
            {
              unsigned int uid = INSN_UID (insn);

              if (include_defs)
                for (ref_rec = DF_INSN_UID_DEFS (uid); *ref_rec; ref_rec++)
                  {
                    df_ref ref = *ref_rec;
                    unsigned int regno = DF_REF_REGNO (ref);
                    if (regno >= start)
                      {
                        unsigned int id
                          = ref_info->begin[regno] + ref_info->count[regno]++;
                        DF_REF_ID (ref) = id;
                        ref_info->refs[id] = ref;
                      }
                  }
              if (include_uses)
                for (ref_rec = DF_INSN_UID_USES (uid); *ref_rec; ref_rec++)
                  {
                    df_ref ref = *ref_rec;
                    unsigned int regno = DF_REF_REGNO (ref);
                    if (regno >= start)
                      {
                        unsigned int id
                          = ref_info->begin[regno] + ref_info->count[regno]++;
                        DF_REF_ID (ref) = id;
                        ref_info->refs[id] = ref;
                      }
                  }
              if (include_eq_uses)
                for (ref_rec = DF_INSN_UID_EQ_USES (uid); *ref_rec; ref_rec++)
                  {
                    df_ref ref = *ref_rec;
                    unsigned int regno = DF_REF_REGNO (ref);
                    if (regno >= start)
                      {
                        unsigned int id
                          = ref_info->begin[regno] + ref_info->count[regno]++;
                        DF_REF_ID (ref) = id;
                        ref_info->refs[id] = ref;
                      }
                  }
            }
        }
    }

  /* The bitmap size is not decremented when refs are deleted.  So
     reset it now that we have squished out all of the empty
     slots.  */

  ref_info->table_size = offset;
}
1758
 
1759
/* Take build ref table for either the uses or defs from the reg-use
1760
   or reg-def chains.  */
1761
 
1762
static void
1763
df_reorganize_refs_by_reg (struct df_ref_info *ref_info,
1764
                           bool include_defs,
1765
                           bool include_uses,
1766
                           bool include_eq_uses)
1767
{
1768
  if (df->analyze_subset)
1769
    df_reorganize_refs_by_reg_by_insn (ref_info, include_defs,
1770
                                       include_uses, include_eq_uses);
1771
  else
1772
    df_reorganize_refs_by_reg_by_reg (ref_info, include_defs,
1773
                                       include_uses, include_eq_uses);
1774
}
1775
 
1776
 
1777
/* Add the refs in REF_VEC to the table in REF_INFO starting at OFFSET.  */
1778
static unsigned int
1779
df_add_refs_to_table (unsigned int offset,
1780
                      struct df_ref_info *ref_info,
1781
                      df_ref *ref_vec)
1782
{
1783
  while (*ref_vec)
1784
    {
1785
      df_ref ref = *ref_vec;
1786
      if ((!(df->changeable_flags & DF_NO_HARD_REGS))
1787
          || (DF_REF_REGNO (ref) >= FIRST_PSEUDO_REGISTER))
1788
        {
1789
          ref_info->refs[offset] = ref;
1790
          DF_REF_ID (*ref_vec) = offset++;
1791
        }
1792
      ref_vec++;
1793
    }
1794
  return offset;
1795
}
1796
 
1797
 
1798
/* Add to the table in REF_INFO all of the refs in BB, starting at
   slot OFFSET, and return the offset just past the last ref added.
   Include the defs if INCLUDE_DEFS.  Include the uses if
   INCLUDE_USES.  Include the eq_uses if INCLUDE_EQ_USES.  */

static unsigned int
df_reorganize_refs_by_insn_bb (basic_block bb, unsigned int offset,
                               struct df_ref_info *ref_info,
                               bool include_defs, bool include_uses,
                               bool include_eq_uses)
{
  rtx insn;

  /* The artificial refs for the block come before the refs of the
     block's insns.  */
  if (include_defs)
    offset = df_add_refs_to_table (offset, ref_info,
                                   df_get_artificial_defs (bb->index));
  if (include_uses)
    offset = df_add_refs_to_table (offset, ref_info,
                                   df_get_artificial_uses (bb->index));

  /* Then each real insn's refs, in block order: defs first, then
     uses, then eq_uses.  */
  FOR_BB_INSNS (bb, insn)
    if (INSN_P (insn))
      {
        unsigned int uid = INSN_UID (insn);
        if (include_defs)
          offset = df_add_refs_to_table (offset, ref_info,
                                         DF_INSN_UID_DEFS (uid));
        if (include_uses)
          offset = df_add_refs_to_table (offset, ref_info,
                                         DF_INSN_UID_USES (uid));
        if (include_eq_uses)
          offset = df_add_refs_to_table (offset, ref_info,
                                         DF_INSN_UID_EQ_USES (uid));
      }
  return offset;
}
1833
 
1834
 
1835
/* Organize the refs by insn into the table in REF_INFO.  If
   blocks_to_analyze is defined, use that set, otherwise the entire
   program.  Include the defs if INCLUDE_DEFS. Include the uses if
   INCLUDE_USES. Include the eq_uses if INCLUDE_EQ_USES.  */

static void
df_reorganize_refs_by_insn (struct df_ref_info *ref_info,
                            bool include_defs, bool include_uses,
                            bool include_eq_uses)
{
  basic_block bb;
  unsigned int offset = 0;

  /* Size the table for all requested ref kinds before filling it.  */
  ref_info->total_size = df_count_refs (include_defs, include_uses, include_eq_uses);
  df_check_and_grow_ref_info (ref_info, 1);
  if (df->blocks_to_analyze)
    {
      bitmap_iterator bi;
      unsigned int index;

      /* Only the requested subset of blocks contributes entries.  */
      EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, index, bi)
        {
          offset = df_reorganize_refs_by_insn_bb (BASIC_BLOCK (index), offset, ref_info,
                                                  include_defs, include_uses,
                                                  include_eq_uses);
        }

      ref_info->table_size = offset;
    }
  else
    {
      FOR_ALL_BB (bb)
        offset = df_reorganize_refs_by_insn_bb (bb, offset, ref_info,
                                                include_defs, include_uses,
                                                include_eq_uses);
      ref_info->table_size = offset;
    }
}
1873
 
1874
 
1875
/* If the use refs in DF are not organized as ORDER, reorganize them.
   The second and third arguments to the reorganize calls select
   (defs, uses, eq_uses); only uses (and optionally note uses) are
   included here.  */

void
df_maybe_reorganize_use_refs (enum df_ref_order order)
{
  /* Already in the requested order: nothing to do.  */
  if (order == df->use_info.ref_order)
    return;

  switch (order)
    {
    case DF_REF_ORDER_BY_REG:
      df_reorganize_refs_by_reg (&df->use_info, false, true, false);
      break;

    case DF_REF_ORDER_BY_REG_WITH_NOTES:
      df_reorganize_refs_by_reg (&df->use_info, false, true, true);
      break;

    case DF_REF_ORDER_BY_INSN:
      df_reorganize_refs_by_insn (&df->use_info, false, true, false);
      break;

    case DF_REF_ORDER_BY_INSN_WITH_NOTES:
      df_reorganize_refs_by_insn (&df->use_info, false, true, true);
      break;

    case DF_REF_ORDER_NO_TABLE:
      /* Drop the table entirely; the reg chains remain valid.  */
      free (df->use_info.refs);
      df->use_info.refs = NULL;
      df->use_info.refs_size = 0;
      break;

    case DF_REF_ORDER_UNORDERED:
    case DF_REF_ORDER_UNORDERED_WITH_NOTES:
      /* Callers may not request an unordered organization.  */
      gcc_unreachable ();
      break;
    }

  df->use_info.ref_order = order;
}
1915
 
1916
 
1917
/* If the def refs in DF are not organized as ORDER, reorganize them.
   Only defs are included (first boolean of the reorganize calls);
   defs never live in notes, so the WITH_NOTES orders are invalid
   here.  */

void
df_maybe_reorganize_def_refs (enum df_ref_order order)
{
  /* Already in the requested order: nothing to do.  */
  if (order == df->def_info.ref_order)
    return;

  switch (order)
    {
    case DF_REF_ORDER_BY_REG:
      df_reorganize_refs_by_reg (&df->def_info, true, false, false);
      break;

    case DF_REF_ORDER_BY_INSN:
      df_reorganize_refs_by_insn (&df->def_info, true, false, false);
      break;

    case DF_REF_ORDER_NO_TABLE:
      /* Drop the table entirely; the reg chains remain valid.  */
      free (df->def_info.refs);
      df->def_info.refs = NULL;
      df->def_info.refs_size = 0;
      break;

    case DF_REF_ORDER_BY_INSN_WITH_NOTES:
    case DF_REF_ORDER_BY_REG_WITH_NOTES:
    case DF_REF_ORDER_UNORDERED:
    case DF_REF_ORDER_UNORDERED_WITH_NOTES:
      /* Note orders make no sense for defs, and unordered may not be
         requested explicitly.  */
      gcc_unreachable ();
      break;
    }

  df->def_info.ref_order = order;
}
1951
 
1952
 
1953
/* Change all of the basic block references in INSN to use the insn's
   current basic block.  This function is called from routines that move
   instructions from one block to another.  */

void
df_insn_change_bb (rtx insn, basic_block new_bb)
{
  basic_block old_bb = BLOCK_FOR_INSN (insn);
  struct df_insn_info *insn_info;
  unsigned int uid = INSN_UID (insn);

  if (old_bb == new_bb)
    return;

  set_block_for_insn (insn, new_bb);

  /* Without an active df instance there is no scan data to update.  */
  if (!df)
    return;

  if (dump_file)
    fprintf (dump_file, "changing bb of uid %d\n", uid);

  insn_info = DF_INSN_UID_SAFE_GET (uid);
  if (insn_info == NULL)
    {
      /* The insn was never scanned; a full rescan records it in its
         new block.  */
      if (dump_file)
        fprintf (dump_file, "  unscanned insn\n");
      df_insn_rescan (insn);
      return;
    }

  if (!INSN_P (insn))
    return;

  /* Both the source and destination blocks need their df problems
     recomputed.  */
  df_set_bb_dirty (new_bb);
  if (old_bb)
    {
      if (dump_file)
        fprintf (dump_file, "  from %d to %d\n",
                 old_bb->index, new_bb->index);
      df_set_bb_dirty (old_bb);
    }
  else
    if (dump_file)
      fprintf (dump_file, "  to %d\n", new_bb->index);
}
1999
 
2000
 
2001
/* Helper function for df_ref_change_reg_with_loc.  Walk the reg chain
   of OLD_DF and move every non-artificial ref whose location is LOC
   from that chain onto NEW_DF's chain, rewriting its regno to
   NEW_REGNO and re-sorting the insn's ref vector (regno is a sort
   key).  */

static void
df_ref_change_reg_with_loc_1 (struct df_reg_info *old_df,
                              struct df_reg_info *new_df,
                              int new_regno, rtx loc)
{
  df_ref the_ref = old_df->reg_chain;

  while (the_ref)
    {
      /* Artificial refs have no loc, so they never match and are
         never moved.  */
      if ((!DF_REF_IS_ARTIFICIAL (the_ref))
          && DF_REF_LOC (the_ref)
          && (*DF_REF_LOC (the_ref) == loc))
        {
          df_ref next_ref = DF_REF_NEXT_REG (the_ref);
          df_ref prev_ref = DF_REF_PREV_REG (the_ref);
          df_ref *ref_vec, *ref_vec_t;
          struct df_insn_info *insn_info = DF_REF_INSN_INFO (the_ref);
          unsigned int count = 0;

          DF_REF_REGNO (the_ref) = new_regno;
          DF_REF_REG (the_ref) = regno_reg_rtx[new_regno];

          /* Pull the_ref out of the old regno chain.  */
          if (prev_ref)
            DF_REF_NEXT_REG (prev_ref) = next_ref;
          else
            old_df->reg_chain = next_ref;
          if (next_ref)
            DF_REF_PREV_REG (next_ref) = prev_ref;
          old_df->n_refs--;

          /* Put the ref into the new regno chain.  */
          DF_REF_PREV_REG (the_ref) = NULL;
          DF_REF_NEXT_REG (the_ref) = new_df->reg_chain;
          if (new_df->reg_chain)
            DF_REF_PREV_REG (new_df->reg_chain) = the_ref;
          new_df->reg_chain = the_ref;
          new_df->n_refs++;
          if (DF_REF_BB (the_ref))
            df_set_bb_dirty (DF_REF_BB (the_ref));

          /* Need to sort the record again that the ref was in because
             the regno is a sorting key.  First, find the right
             record.  */
          if (DF_REF_FLAGS (the_ref) & DF_REF_IN_NOTE)
            ref_vec = insn_info->eq_uses;
          else
            ref_vec = insn_info->uses;
          if (dump_file)
            fprintf (dump_file, "changing reg in insn %d\n",
                     DF_REF_INSN_UID (the_ref));

          ref_vec_t = ref_vec;

          /* Find the length.  The vector is NULL-terminated.  */
          while (*ref_vec_t)
            {
              count++;
              ref_vec_t++;
            }
          qsort (ref_vec, count, sizeof (df_ref ), df_ref_compare);

          /* next_ref was saved before unlinking, so the walk of the
             old chain continues correctly.  */
          the_ref = next_ref;
        }
      else
        the_ref = DF_REF_NEXT_REG (the_ref);
    }
}
2071
 
2072
 
2073
/* Change the regno of all refs that contained LOC from OLD_REGNO to
2074
   NEW_REGNO.  Refs that do not match LOC are not changed which means
2075
   that artificial refs are not changed since they have no loc.  This
2076
   call is to support the SET_REGNO macro. */
2077
 
2078
void
2079
df_ref_change_reg_with_loc (int old_regno, int new_regno, rtx loc)
2080
{
2081
  if ((!df) || (old_regno == -1) || (old_regno == new_regno))
2082
    return;
2083
 
2084
  df_grow_reg_info ();
2085
 
2086
  df_ref_change_reg_with_loc_1 (DF_REG_DEF_GET (old_regno),
2087
                                DF_REG_DEF_GET (new_regno), new_regno, loc);
2088
  df_ref_change_reg_with_loc_1 (DF_REG_USE_GET (old_regno),
2089
                                DF_REG_USE_GET (new_regno), new_regno, loc);
2090
  df_ref_change_reg_with_loc_1 (DF_REG_EQ_USE_GET (old_regno),
2091
                                DF_REG_EQ_USE_GET (new_regno), new_regno, loc);
2092
}
2093
 
2094
 
2095
/* Delete the mw_hardregs that point into the eq_notes.  */
2096
 
2097
static unsigned int
2098
df_mw_hardreg_chain_delete_eq_uses (struct df_insn_info *insn_info)
2099
{
2100
  struct df_mw_hardreg **mw_vec = insn_info->mw_hardregs;
2101
  unsigned int deleted = 0;
2102
  unsigned int count = 0;
2103
  struct df_scan_problem_data *problem_data
2104
    = (struct df_scan_problem_data *) df_scan->problem_data;
2105
 
2106
  if (!*mw_vec)
2107
    return 0;
2108
 
2109
  while (*mw_vec)
2110
    {
2111
      if ((*mw_vec)->flags & DF_REF_IN_NOTE)
2112
        {
2113
          struct df_mw_hardreg **temp_vec = mw_vec;
2114
 
2115
          pool_free (problem_data->mw_reg_pool, *mw_vec);
2116
          temp_vec = mw_vec;
2117
          /* Shove the remaining ones down one to fill the gap.  While
2118
             this looks n**2, it is highly unusual to have any mw regs
2119
             in eq_notes and the chances of more than one are almost
2120
             non existent.  */
2121
          while (*temp_vec)
2122
            {
2123
              *temp_vec = *(temp_vec + 1);
2124
              temp_vec++;
2125
            }
2126
          deleted++;
2127
        }
2128
      else
2129
        {
2130
          mw_vec++;
2131
          count++;
2132
        }
2133
    }
2134
 
2135
  if (count == 0)
2136
    {
2137
      df_scan_free_mws_vec (insn_info->mw_hardregs);
2138
      insn_info->mw_hardregs = df_null_mw_rec;
2139
      return 0;
2140
    }
2141
  return deleted;
2142
}
2143
 
2144
 
2145
/* Rescan only the REG_EQUIV/REG_EQUAL notes part of INSN.  The
   eq_uses and the note-resident mw_hardregs of the insn are rebuilt;
   the defs and plain uses are left alone.  */

void
df_notes_rescan (rtx insn)
{
  struct df_insn_info *insn_info;
  unsigned int uid = INSN_UID (insn);

  if (!df)
    return;

  /* The client has disabled rescanning and plans to do it itself.  */
  if (df->changeable_flags & DF_NO_INSN_RESCAN)
    return;

  /* Do nothing if the insn hasn't been emitted yet.  */
  if (!BLOCK_FOR_INSN (insn))
    return;

  df_grow_bb_info (df_scan);
  df_grow_reg_info ();

  insn_info = DF_INSN_UID_SAFE_GET (INSN_UID(insn));

  /* The client has deferred rescanning.  */
  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
    {
      if (!insn_info)
        {
          /* Create an empty record so that the deferred rescan has
             something to update.  */
          insn_info = df_insn_create_insn_record (insn);
          insn_info->defs = df_null_ref_rec;
          insn_info->uses = df_null_ref_rec;
          insn_info->eq_uses = df_null_ref_rec;
          insn_info->mw_hardregs = df_null_mw_rec;
        }

      bitmap_clear_bit (&df->insns_to_delete, uid);
      /* If the insn is set to be rescanned, it does not need to also
         be notes rescanned.  */
      if (!bitmap_bit_p (&df->insns_to_rescan, uid))
        bitmap_set_bit (&df->insns_to_notes_rescan, INSN_UID (insn));
      return;
    }

  bitmap_clear_bit (&df->insns_to_delete, uid);
  bitmap_clear_bit (&df->insns_to_notes_rescan, uid);

  if (insn_info)
    {
      basic_block bb = BLOCK_FOR_INSN (insn);
      rtx note;
      struct df_collection_rec collection_rec;
      unsigned int num_deleted;
      unsigned int mw_len;

      /* Only eq_use and mw vectors are allocated: def_vec/use_vec
         stay NULL so df_refs_add_to_chains leaves defs/uses alone.  */
      memset (&collection_rec, 0, sizeof (struct df_collection_rec));
      collection_rec.eq_use_vec = VEC_alloc (df_ref, stack, 32);
      collection_rec.mw_vec = VEC_alloc (df_mw_hardreg_ptr, stack, 32);

      /* Throw away the old note-derived information.  */
      num_deleted = df_mw_hardreg_chain_delete_eq_uses (insn_info);
      df_ref_chain_delete (insn_info->eq_uses);
      insn_info->eq_uses = NULL;

      /* Process REG_EQUIV/REG_EQUAL notes */
      for (note = REG_NOTES (insn); note;
           note = XEXP (note, 1))
        {
          switch (REG_NOTE_KIND (note))
            {
            case REG_EQUIV:
            case REG_EQUAL:
              df_uses_record (&collection_rec,
                              &XEXP (note, 0), DF_REF_REG_USE,
                              bb, insn_info, DF_REF_IN_NOTE);
              /* Fall through.  */
            default:
              break;
            }
        }

      /* Find some place to put any new mw_hardregs.  */
      df_canonize_collection_rec (&collection_rec);
      mw_len = VEC_length (df_mw_hardreg_ptr, collection_rec.mw_vec);
      if (mw_len)
        {
          unsigned int count = 0;
          struct df_mw_hardreg **mw_rec = insn_info->mw_hardregs;
          /* Count the surviving (non-note) mw records.  */
          while (*mw_rec)
            {
              count++;
              mw_rec++;
            }

          if (count)
            {
              /* Append to the end of the existing record after
                 expanding it if necessary.  */
              if (mw_len > num_deleted)
                {
                  insn_info->mw_hardregs =
                    XRESIZEVEC (struct df_mw_hardreg *,
                                insn_info->mw_hardregs,
                                count + 1 + mw_len);
                }
              memcpy (&insn_info->mw_hardregs[count],
                      VEC_address (df_mw_hardreg_ptr, collection_rec.mw_vec),
                      mw_len * sizeof (struct df_mw_hardreg *));
              insn_info->mw_hardregs[count + mw_len] = NULL;
              qsort (insn_info->mw_hardregs, count + mw_len,
                     sizeof (struct df_mw_hardreg *), df_mw_compare);
            }
          else
            {
              /* No vector there. */
              insn_info->mw_hardregs
                = XNEWVEC (struct df_mw_hardreg*, 1 + mw_len);
              memcpy (insn_info->mw_hardregs,
                      VEC_address (df_mw_hardreg_ptr, collection_rec.mw_vec),
                      mw_len * sizeof (struct df_mw_hardreg *));
              insn_info->mw_hardregs[mw_len] = NULL;
            }
        }
      /* Get rid of the mw_rec so that df_refs_add_to_chains will
         ignore it.  */
      VEC_free (df_mw_hardreg_ptr, stack, collection_rec.mw_vec);
      df_refs_add_to_chains (&collection_rec, bb, insn);
      VEC_free (df_ref, stack, collection_rec.eq_use_vec);
    }
  else
    /* Never scanned before: do a full rescan instead.  */
    df_insn_rescan (insn);

}
2276
 
2277
 
2278
/*----------------------------------------------------------------------------
2279
   Hard core instruction scanning code.  No external interfaces here,
2280
   just a lot of routines that look inside insns.
2281
----------------------------------------------------------------------------*/
2282
 
2283
 
2284
/* Return true if the contents of two df_ref's are identical.
   It ignores DF_REF_MARKER (and DF_REF_MW_HARDREG) when comparing
   flags.  REF2 may be NULL, in which case the result is false.  */

static bool
df_ref_equal_p (df_ref ref1, df_ref ref2)
{
  if (!ref2)
    return false;

  if (ref1 == ref2)
    return true;

  /* All of the common fields must match.  */
  if (DF_REF_CLASS (ref1) != DF_REF_CLASS (ref2)
      || DF_REF_REGNO (ref1) != DF_REF_REGNO (ref2)
      || DF_REF_REG (ref1) != DF_REF_REG (ref2)
      || DF_REF_TYPE (ref1) != DF_REF_TYPE (ref2)
      || ((DF_REF_FLAGS (ref1) & ~(DF_REF_REG_MARKER + DF_REF_MW_HARDREG))
          != (DF_REF_FLAGS (ref2) & ~(DF_REF_REG_MARKER + DF_REF_MW_HARDREG)))
      || DF_REF_BB (ref1) != DF_REF_BB (ref2)
      || DF_REF_INSN_INFO (ref1) != DF_REF_INSN_INFO (ref2))
    return false;

  switch (DF_REF_CLASS (ref1))
    {
    case DF_REF_ARTIFICIAL:
    case DF_REF_BASE:
      /* These classes have no loc field to compare.  */
      return true;

    case DF_REF_REGULAR:
      return DF_REF_LOC (ref1) == DF_REF_LOC (ref2);

    default:
      gcc_unreachable ();
    }
  /* Not reached; all cases return above.  */
  return false;
}
2320
 
2321
 
2322
/* Compare REF1 and REF2 for sorting.  This is only called from places
   where all of the refs are of the same type, in the same insn, and
   have the same bb.  So these fields are not checked.  */

static int
df_ref_compare (const void *r1, const void *r2)
{
  const df_ref ref1 = *(const df_ref *)r1;
  const df_ref ref2 = *(const df_ref *)r2;

  if (ref1 == ref2)
    return 0;

  if (DF_REF_CLASS (ref1) != DF_REF_CLASS (ref2))
    return (int)DF_REF_CLASS (ref1) - (int)DF_REF_CLASS (ref2);

  if (DF_REF_REGNO (ref1) != DF_REF_REGNO (ref2))
    return (int)DF_REF_REGNO (ref1) - (int)DF_REF_REGNO (ref2);

  if (DF_REF_TYPE (ref1) != DF_REF_TYPE (ref2))
    return (int)DF_REF_TYPE (ref1) - (int)DF_REF_TYPE (ref2);

  /* Same regno but different rtxes: fall back to creation order for
     a stable result.  */
  if (DF_REF_REG (ref1) != DF_REF_REG (ref2))
    return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);

  /* Cannot look at the LOC field on artificial refs.  */
  if (DF_REF_CLASS (ref1) != DF_REF_ARTIFICIAL
      && DF_REF_LOC (ref1) != DF_REF_LOC (ref2))
    return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);

  if (DF_REF_FLAGS (ref1) != DF_REF_FLAGS (ref2))
    {
      /* If two refs are identical except that one of them is from
         a mw and one is not, we need to have the one with the mw
         first.  */
      if (DF_REF_FLAGS_IS_SET (ref1, DF_REF_MW_HARDREG) ==
          DF_REF_FLAGS_IS_SET (ref2, DF_REF_MW_HARDREG))
        return DF_REF_FLAGS (ref1) - DF_REF_FLAGS (ref2);
      else if (DF_REF_FLAGS_IS_SET (ref1, DF_REF_MW_HARDREG))
        return -1;
      else
        return 1;
    }

  /* Fully tied: creation order breaks the tie deterministically.  */
  return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);
}
2368
 
2369
static void
2370
df_swap_refs (VEC(df_ref,stack) **ref_vec, int i, int j)
2371
{
2372
  df_ref tmp = VEC_index (df_ref, *ref_vec, i);
2373
  VEC_replace (df_ref, *ref_vec, i, VEC_index (df_ref, *ref_vec, j));
2374
  VEC_replace (df_ref, *ref_vec, j, tmp);
2375
}
2376
 
2377
/* Sort *REF_VEC with df_ref_compare and remove duplicate refs
   (freeing the duplicates) in place.  */

static void
df_sort_and_compress_refs (VEC(df_ref,stack) **ref_vec)
{
  unsigned int count;
  unsigned int i;
  unsigned int dist = 0;   /* Number of duplicates squeezed out so far.  */

  count = VEC_length (df_ref, *ref_vec);

  /* If there are 1 or 0 elements, there is nothing to do.  */
  if (count < 2)
    return;
  else if (count == 2)
    {
      /* Two elements: a single compare-and-swap sorts them.  */
      df_ref r0 = VEC_index (df_ref, *ref_vec, 0);
      df_ref r1 = VEC_index (df_ref, *ref_vec, 1);
      if (df_ref_compare (&r0, &r1) > 0)
        df_swap_refs (ref_vec, 0, 1);
    }
  else
    {
      /* Scan for the first adjacent pair that is not strictly
         increasing.  */
      for (i = 0; i < count - 1; i++)
        {
          df_ref r0 = VEC_index (df_ref, *ref_vec, i);
          df_ref r1 = VEC_index (df_ref, *ref_vec, i + 1);
          if (df_ref_compare (&r0, &r1) >= 0)
            break;
        }
      /* If the array is already strictly ordered,
         which is the most common case for large COUNT case
         (which happens for CALL INSNs),
         no need to sort and filter out duplicate.
         Simply return the count.
         Make sure DF_GET_ADD_REFS adds refs in the increasing order
         of DF_REF_COMPARE.  */
      if (i == count - 1)
        return;
      VEC_qsort (df_ref, *ref_vec, df_ref_compare);
    }

  /* Compact the sorted vector: for each kept element, skip over and
     free the run of equal refs that follows it.  */
  for (i=0; i<count-dist; i++)
    {
      /* Find the next ref that is not equal to the current ref.  */
      while (i + dist + 1 < count
             && df_ref_equal_p (VEC_index (df_ref, *ref_vec, i),
                                VEC_index (df_ref, *ref_vec, i + dist + 1)))
        {
          df_free_ref (VEC_index (df_ref, *ref_vec, i + dist + 1));
          dist++;
        }
      /* Copy it down to the next position.  */
      if (dist && i + dist + 1 < count)
        VEC_replace (df_ref, *ref_vec, i + 1,
                     VEC_index (df_ref, *ref_vec, i + dist + 1));
    }

  count -= dist;
  VEC_truncate (df_ref, *ref_vec, count);
}
2438
 
2439
 
2440
/* Return true if the contents of two df_ref's are identical.
2441
   It ignores DF_REF_MARKER.  */
2442
 
2443
static bool
2444
df_mw_equal_p (struct df_mw_hardreg *mw1, struct df_mw_hardreg *mw2)
2445
{
2446
  if (!mw2)
2447
    return false;
2448
  return (mw1 == mw2) ||
2449
    (mw1->mw_reg == mw2->mw_reg
2450
     && mw1->type == mw2->type
2451
     && mw1->flags == mw2->flags
2452
     && mw1->start_regno == mw2->start_regno
2453
     && mw1->end_regno == mw2->end_regno);
2454
}
2455
 
2456
 
2457
/* Compare MW1 and MW2 for sorting.  */
2458
 
2459
static int
2460
df_mw_compare (const void *m1, const void *m2)
2461
{
2462
  const struct df_mw_hardreg *const mw1 = *(const struct df_mw_hardreg *const*)m1;
2463
  const struct df_mw_hardreg *const mw2 = *(const struct df_mw_hardreg *const*)m2;
2464
 
2465
  if (mw1 == mw2)
2466
    return 0;
2467
 
2468
  if (mw1->type != mw2->type)
2469
    return mw1->type - mw2->type;
2470
 
2471
  if (mw1->flags != mw2->flags)
2472
    return mw1->flags - mw2->flags;
2473
 
2474
  if (mw1->start_regno != mw2->start_regno)
2475
    return mw1->start_regno - mw2->start_regno;
2476
 
2477
  if (mw1->end_regno != mw2->end_regno)
2478
    return mw1->end_regno - mw2->end_regno;
2479
 
2480
  if (mw1->mw_reg != mw2->mw_reg)
2481
    return mw1->mw_order - mw2->mw_order;
2482
 
2483
  return 0;
2484
}
2485
 
2486
 
2487
/* Sort *MW_VEC with df_mw_compare and remove duplicate mw_hardregs
   (returning the duplicates to the allocation pool) in place.  */

static void
df_sort_and_compress_mws (VEC(df_mw_hardreg_ptr,stack) **mw_vec)
{
  unsigned int count;
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;
  unsigned int i;
  unsigned int dist = 0;   /* Number of duplicates squeezed out so far.  */

  count = VEC_length (df_mw_hardreg_ptr, *mw_vec);
  /* 0 or 1 elements are trivially sorted and duplicate free.  */
  if (count < 2)
    return;
  else if (count == 2)
    {
      /* Two elements: a single compare-and-swap sorts them.  */
      struct df_mw_hardreg *m0 = VEC_index (df_mw_hardreg_ptr, *mw_vec, 0);
      struct df_mw_hardreg *m1 = VEC_index (df_mw_hardreg_ptr, *mw_vec, 1);
      if (df_mw_compare (&m0, &m1) > 0)
        {
          struct df_mw_hardreg *tmp = VEC_index (df_mw_hardreg_ptr,
                                                 *mw_vec, 0);
          VEC_replace (df_mw_hardreg_ptr, *mw_vec, 0,
                       VEC_index (df_mw_hardreg_ptr, *mw_vec, 1));
          VEC_replace (df_mw_hardreg_ptr, *mw_vec, 1, tmp);
        }
    }
  else
    VEC_qsort (df_mw_hardreg_ptr, *mw_vec, df_mw_compare);

  /* Compact the sorted vector: for each kept element, skip over and
     pool-free the run of equal records that follows it.  */
  for (i=0; i<count-dist; i++)
    {
      /* Find the next ref that is not equal to the current ref.  */
      while (i + dist + 1 < count
             && df_mw_equal_p (VEC_index (df_mw_hardreg_ptr, *mw_vec, i),
                               VEC_index (df_mw_hardreg_ptr, *mw_vec,
                                          i + dist + 1)))
        {
          pool_free (problem_data->mw_reg_pool,
                     VEC_index (df_mw_hardreg_ptr, *mw_vec, i + dist + 1));
          dist++;
        }
      /* Copy it down to the next position.  */
      if (dist && i + dist + 1 < count)
        VEC_replace (df_mw_hardreg_ptr, *mw_vec, i + 1,
                     VEC_index (df_mw_hardreg_ptr, *mw_vec, i + dist + 1));
    }

  count -= dist;
  VEC_truncate (df_mw_hardreg_ptr, *mw_vec, count);
}
2538
 
2539
 
2540
/* Sort and remove duplicates from the COLLECTION_REC.  */
2541
 
2542
static void
2543
df_canonize_collection_rec (struct df_collection_rec *collection_rec)
2544
{
2545
  df_sort_and_compress_refs (&collection_rec->def_vec);
2546
  df_sort_and_compress_refs (&collection_rec->use_vec);
2547
  df_sort_and_compress_refs (&collection_rec->eq_use_vec);
2548
  df_sort_and_compress_mws (&collection_rec->mw_vec);
2549
}
2550
 
2551
 
2552
/* Add the new df_ref THIS_REF to the appropriate reg_info chain in
   REG_INFO and, if ADD_TO_TABLE, to the flat table in REF_INFO.  */

static void
df_install_ref (df_ref this_ref,
                struct df_reg_info *reg_info,
                struct df_ref_info *ref_info,
                bool add_to_table)
{
  unsigned int regno = DF_REF_REGNO (this_ref);
  /* Add the ref to the reg_{def,use,eq_use} chain.  */
  df_ref head = reg_info->reg_chain;

  reg_info->reg_chain = this_ref;
  reg_info->n_refs++;

  if (DF_REF_FLAGS_IS_SET (this_ref, DF_HARD_REG_LIVE))
    {
      /* DF_HARD_REG_LIVE is only meaningful for hard registers.  */
      gcc_assert (regno < FIRST_PSEUDO_REGISTER);
      df->hard_regs_live_count[regno]++;
    }

  /* The ref must not already be on any reg chain.  */
  gcc_checking_assert (DF_REF_NEXT_REG (this_ref) == NULL
                       && DF_REF_PREV_REG (this_ref) == NULL);

  /* Link the ref in at the front of the doubly-linked chain.  */
  DF_REF_NEXT_REG (this_ref) = head;

  /* We cannot actually link to the head of the chain.  */
  DF_REF_PREV_REG (this_ref) = NULL;

  if (head)
    DF_REF_PREV_REG (head) = this_ref;

  if (add_to_table)
    {
      gcc_assert (ref_info->ref_order != DF_REF_ORDER_NO_TABLE);
      df_check_and_grow_ref_info (ref_info, 1);
      DF_REF_ID (this_ref) = ref_info->table_size;
      /* Add the ref to the big array of defs.  */
      ref_info->refs[ref_info->table_size] = this_ref;
      ref_info->table_size++;
    }
  else
    /* Refs outside the table get an invalid id.  */
    DF_REF_ID (this_ref) = -1;

  ref_info->total_size++;
}
2598
 
2599
 
2600
/* This function takes one of the groups of refs (defs, uses or
   eq_uses) and installs the entire group into the insn.  It also adds
   each of these refs into the appropriate chains.  Returns a freshly
   allocated NULL-terminated vector, or the shared empty record when
   OLD_VEC is empty.  IS_NOTES is true when the refs came from
   REG_EQUIV/REG_EQUAL notes.  */

static df_ref *
df_install_refs (basic_block bb,
                 VEC(df_ref,stack)* old_vec,
                 struct df_reg_info **reg_info,
                 struct df_ref_info *ref_info,
                 bool is_notes)
{
  unsigned int count;

  count = VEC_length (df_ref, old_vec);
  if (count)
    {
      df_ref *new_vec = XNEWVEC (df_ref, count + 1);
      bool add_to_table;
      df_ref this_ref;
      unsigned int ix;

      /* Installing refs degrades any sorted table order to
         unordered, and decides whether note refs belong in the
         table.  */
      switch (ref_info->ref_order)
        {
        case DF_REF_ORDER_UNORDERED_WITH_NOTES:
        case DF_REF_ORDER_BY_REG_WITH_NOTES:
        case DF_REF_ORDER_BY_INSN_WITH_NOTES:
          ref_info->ref_order = DF_REF_ORDER_UNORDERED_WITH_NOTES;
          add_to_table = true;
          break;
        case DF_REF_ORDER_UNORDERED:
        case DF_REF_ORDER_BY_REG:
        case DF_REF_ORDER_BY_INSN:
          ref_info->ref_order = DF_REF_ORDER_UNORDERED;
          add_to_table = !is_notes;
          break;
        default:
          add_to_table = false;
          break;
        }

      /* Do not add if ref is not in the right blocks.  */
      if (add_to_table && df->analyze_subset)
        add_to_table = bitmap_bit_p (df->blocks_to_analyze, bb->index);

      FOR_EACH_VEC_ELT (df_ref, old_vec, ix, this_ref)
        {
          new_vec[ix] = this_ref;
          df_install_ref (this_ref, reg_info[DF_REF_REGNO (this_ref)],
                          ref_info, add_to_table);
        }

      /* NULL-terminate the new vector.  */
      new_vec[count] = NULL;
      return new_vec;
    }
  else
    return df_null_ref_rec;
}
2657
 
2658
 
2659
/* This function takes the mws installs the entire group into the
2660
   insn.  */
2661
 
2662
static struct df_mw_hardreg **
2663
df_install_mws (VEC(df_mw_hardreg_ptr,stack) *old_vec)
2664
{
2665
  unsigned int count;
2666
 
2667
  count = VEC_length (df_mw_hardreg_ptr, old_vec);
2668
  if (count)
2669
    {
2670
      struct df_mw_hardreg **new_vec
2671
        = XNEWVEC (struct df_mw_hardreg*, count + 1);
2672
      memcpy (new_vec, VEC_address (df_mw_hardreg_ptr, old_vec),
2673
              sizeof (struct df_mw_hardreg*) * count);
2674
      new_vec[count] = NULL;
2675
      return new_vec;
2676
    }
2677
  else
2678
    return df_null_mw_rec;
2679
}
2680
 
2681
 
2682
/* Add a chain of df_refs to appropriate ref chain/reg_info/ref_info
   chains and update other necessary information.  With a non-NULL
   INSN the refs in COLLECTION_REC are installed into the insn's
   record; otherwise they become BB's artificial refs.  */

static void
df_refs_add_to_chains (struct df_collection_rec *collection_rec,
                       basic_block bb, rtx insn)
{
  if (insn)
    {
      struct df_insn_info *insn_rec = DF_INSN_INFO_GET (insn);
      /* If there is a vector in the collection rec, add it to the
         insn.  A null rec is a signal that the caller will handle the
         chain specially.  */
      if (collection_rec->def_vec)
        {
          df_scan_free_ref_vec (insn_rec->defs);
          insn_rec->defs
            = df_install_refs (bb, collection_rec->def_vec,
                               df->def_regs,
                               &df->def_info, false);
        }
      if (collection_rec->use_vec)
        {
          df_scan_free_ref_vec (insn_rec->uses);
          insn_rec->uses
            = df_install_refs (bb, collection_rec->use_vec,
                               df->use_regs,
                               &df->use_info, false);
        }
      if (collection_rec->eq_use_vec)
        {
          /* eq_uses share use_info but are marked as note refs
             (last argument true).  */
          df_scan_free_ref_vec (insn_rec->eq_uses);
          insn_rec->eq_uses
            = df_install_refs (bb, collection_rec->eq_use_vec,
                               df->eq_use_regs,
                               &df->use_info, true);
        }
      if (collection_rec->mw_vec)
        {
          df_scan_free_mws_vec (insn_rec->mw_hardregs);
          insn_rec->mw_hardregs
            = df_install_mws (collection_rec->mw_vec);
        }
    }
  else
    {
      /* No insn: these are the block's artificial defs and uses.  */
      struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb->index);

      df_scan_free_ref_vec (bb_info->artificial_defs);
      bb_info->artificial_defs
        = df_install_refs (bb, collection_rec->def_vec,
                           df->def_regs,
                           &df->def_info, false);
      df_scan_free_ref_vec (bb_info->artificial_uses);
      bb_info->artificial_uses
        = df_install_refs (bb, collection_rec->use_vec,
                           df->use_regs,
                           &df->use_info, false);
    }
}
2742
 
2743
 
2744
/* Allocate a ref and initialize its fields.  */
2745
 
2746
static df_ref
2747
df_ref_create_structure (enum df_ref_class cl,
2748
                         struct df_collection_rec *collection_rec,
2749
                         rtx reg, rtx *loc,
2750
                         basic_block bb, struct df_insn_info *info,
2751
                         enum df_ref_type ref_type,
2752
                         int ref_flags)
2753
{
2754
  df_ref this_ref = NULL;
2755
  int regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
2756
  struct df_scan_problem_data *problem_data
2757
    = (struct df_scan_problem_data *) df_scan->problem_data;
2758
 
2759
  switch (cl)
2760
    {
2761
    case DF_REF_BASE:
2762
      this_ref = (df_ref) pool_alloc (problem_data->ref_base_pool);
2763
      gcc_checking_assert (loc == NULL);
2764
      break;
2765
 
2766
    case DF_REF_ARTIFICIAL:
2767
      this_ref = (df_ref) pool_alloc (problem_data->ref_artificial_pool);
2768
      this_ref->artificial_ref.bb = bb;
2769
      gcc_checking_assert (loc == NULL);
2770
      break;
2771
 
2772
    case DF_REF_REGULAR:
2773
      this_ref = (df_ref) pool_alloc (problem_data->ref_regular_pool);
2774
      this_ref->regular_ref.loc = loc;
2775
      gcc_checking_assert (loc);
2776
      break;
2777
    }
2778
 
2779
  DF_REF_CLASS (this_ref) = cl;
2780
  DF_REF_ID (this_ref) = -1;
2781
  DF_REF_REG (this_ref) = reg;
2782
  DF_REF_REGNO (this_ref) =  regno;
2783
  DF_REF_TYPE (this_ref) = ref_type;
2784
  DF_REF_INSN_INFO (this_ref) = info;
2785
  DF_REF_CHAIN (this_ref) = NULL;
2786
  DF_REF_FLAGS (this_ref) = ref_flags;
2787
  DF_REF_NEXT_REG (this_ref) = NULL;
2788
  DF_REF_PREV_REG (this_ref) = NULL;
2789
  DF_REF_ORDER (this_ref) = df->ref_order++;
2790
 
2791
  /* We need to clear this bit because fwprop, and in the future
2792
     possibly other optimizations sometimes create new refs using ond
2793
     refs as the model.  */
2794
  DF_REF_FLAGS_CLEAR (this_ref, DF_HARD_REG_LIVE);
2795
 
2796
  /* See if this ref needs to have DF_HARD_REG_LIVE bit set.  */
2797
  if (regno < FIRST_PSEUDO_REGISTER
2798
      && !DF_REF_IS_ARTIFICIAL (this_ref)
2799
      && !DEBUG_INSN_P (DF_REF_INSN (this_ref)))
2800
    {
2801
      if (DF_REF_REG_DEF_P (this_ref))
2802
        {
2803
          if (!DF_REF_FLAGS_IS_SET (this_ref, DF_REF_MAY_CLOBBER))
2804
            DF_REF_FLAGS_SET (this_ref, DF_HARD_REG_LIVE);
2805
        }
2806
      else if (!(TEST_HARD_REG_BIT (elim_reg_set, regno)
2807
                 && (regno == FRAME_POINTER_REGNUM
2808
                     || regno == ARG_POINTER_REGNUM)))
2809
        DF_REF_FLAGS_SET (this_ref, DF_HARD_REG_LIVE);
2810
    }
2811
 
2812
  if (collection_rec)
2813
    {
2814
      if (DF_REF_REG_DEF_P (this_ref))
2815
        VEC_safe_push (df_ref, stack, collection_rec->def_vec, this_ref);
2816
      else if (DF_REF_FLAGS (this_ref) & DF_REF_IN_NOTE)
2817
        VEC_safe_push (df_ref, stack, collection_rec->eq_use_vec, this_ref);
2818
      else
2819
        VEC_safe_push (df_ref, stack, collection_rec->use_vec, this_ref);
2820
    }
2821
  else
2822
    df_install_ref_incremental (this_ref);
2823
 
2824
  return this_ref;
2825
}
2826
 
2827
 
2828
/* Create new references of type DF_REF_TYPE for each part of register REG
2829
   at address LOC within INSN of BB.  */
2830
 
2831
 
2832
static void
2833
df_ref_record (enum df_ref_class cl,
2834
               struct df_collection_rec *collection_rec,
2835
               rtx reg, rtx *loc,
2836
               basic_block bb, struct df_insn_info *insn_info,
2837
               enum df_ref_type ref_type,
2838
               int ref_flags)
2839
{
2840
  unsigned int regno;
2841
 
2842
  gcc_checking_assert (REG_P (reg) || GET_CODE (reg) == SUBREG);
2843
 
2844
  regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
2845
  if (regno < FIRST_PSEUDO_REGISTER)
2846
    {
2847
      struct df_mw_hardreg *hardreg = NULL;
2848
      struct df_scan_problem_data *problem_data
2849
        = (struct df_scan_problem_data *) df_scan->problem_data;
2850
      unsigned int i;
2851
      unsigned int endregno;
2852
      df_ref ref;
2853
 
2854
      if (GET_CODE (reg) == SUBREG)
2855
        {
2856
          regno += subreg_regno_offset (regno, GET_MODE (SUBREG_REG (reg)),
2857
                                        SUBREG_BYTE (reg), GET_MODE (reg));
2858
          endregno = regno + subreg_nregs (reg);
2859
        }
2860
      else
2861
        endregno = END_HARD_REGNO (reg);
2862
 
2863
      /*  If this is a multiword hardreg, we create some extra
2864
          datastructures that will enable us to easily build REG_DEAD
2865
          and REG_UNUSED notes.  */
2866
      if (collection_rec
2867
          && (endregno != regno + 1) && insn_info)
2868
        {
2869
          /* Sets to a subreg of a multiword register are partial.
2870
             Sets to a non-subreg of a multiword register are not.  */
2871
          if (GET_CODE (reg) == SUBREG)
2872
            ref_flags |= DF_REF_PARTIAL;
2873
          ref_flags |= DF_REF_MW_HARDREG;
2874
 
2875
          hardreg = (struct df_mw_hardreg *) pool_alloc (problem_data->mw_reg_pool);
2876
          hardreg->type = ref_type;
2877
          hardreg->flags = ref_flags;
2878
          hardreg->mw_reg = reg;
2879
          hardreg->start_regno = regno;
2880
          hardreg->end_regno = endregno - 1;
2881
          hardreg->mw_order = df->ref_order++;
2882
          VEC_safe_push (df_mw_hardreg_ptr, stack, collection_rec->mw_vec,
2883
                         hardreg);
2884
        }
2885
 
2886
      for (i = regno; i < endregno; i++)
2887
        {
2888
          ref = df_ref_create_structure (cl, collection_rec, regno_reg_rtx[i], loc,
2889
                                         bb, insn_info, ref_type, ref_flags);
2890
 
2891
          gcc_assert (ORIGINAL_REGNO (DF_REF_REG (ref)) == i);
2892
        }
2893
    }
2894
  else
2895
    {
2896
      df_ref_create_structure (cl, collection_rec, reg, loc, bb, insn_info,
2897
                               ref_type, ref_flags);
2898
    }
2899
}
2900
 
2901
 
2902
/* A set to a non-paradoxical SUBREG for which the number of word_mode units
2903
   covered by the outer mode is smaller than that covered by the inner mode,
2904
   is a read-modify-write operation.
2905
   This function returns true iff the SUBREG X is such a SUBREG.  */
2906
 
2907
bool
2908
df_read_modify_subreg_p (rtx x)
2909
{
2910
  unsigned int isize, osize;
2911
  if (GET_CODE (x) != SUBREG)
2912
    return false;
2913
  isize = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)));
2914
  osize = GET_MODE_SIZE (GET_MODE (x));
2915
  return isize > osize
2916
         && isize > REGMODE_NATURAL_SIZE (GET_MODE (SUBREG_REG (x)));
2917
}
2918
 
2919
 
2920
/* Process all the registers defined in the rtx, X.
2921
   Autoincrement/decrement definitions will be picked up by
2922
   df_uses_record.  */
2923
 
2924
static void
2925
df_def_record_1 (struct df_collection_rec *collection_rec,
2926
                 rtx x, basic_block bb, struct df_insn_info *insn_info,
2927
                 int flags)
2928
{
2929
  rtx *loc;
2930
  rtx dst;
2931
 
2932
 /* We may recursively call ourselves on EXPR_LIST when dealing with PARALLEL
2933
     construct.  */
2934
  if (GET_CODE (x) == EXPR_LIST || GET_CODE (x) == CLOBBER)
2935
    loc = &XEXP (x, 0);
2936
  else
2937
    loc = &SET_DEST (x);
2938
  dst = *loc;
2939
 
2940
  /* It is legal to have a set destination be a parallel. */
2941
  if (GET_CODE (dst) == PARALLEL)
2942
    {
2943
      int i;
2944
 
2945
      for (i = XVECLEN (dst, 0) - 1; i >= 0; i--)
2946
        {
2947
          rtx temp = XVECEXP (dst, 0, i);
2948
          if (GET_CODE (temp) == EXPR_LIST || GET_CODE (temp) == CLOBBER
2949
              || GET_CODE (temp) == SET)
2950
            df_def_record_1 (collection_rec,
2951
                             temp, bb, insn_info,
2952
                             GET_CODE (temp) == CLOBBER
2953
                             ? flags | DF_REF_MUST_CLOBBER : flags);
2954
        }
2955
      return;
2956
    }
2957
 
2958
  if (GET_CODE (dst) == STRICT_LOW_PART)
2959
    {
2960
      flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL | DF_REF_STRICT_LOW_PART;
2961
 
2962
      loc = &XEXP (dst, 0);
2963
      dst = *loc;
2964
    }
2965
 
2966
  if (GET_CODE (dst) == ZERO_EXTRACT)
2967
    {
2968
      flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL | DF_REF_ZERO_EXTRACT;
2969
 
2970
      loc = &XEXP (dst, 0);
2971
      dst = *loc;
2972
    }
2973
 
2974
  /* At this point if we do not have a reg or a subreg, just return.  */
2975
  if (REG_P (dst))
2976
    {
2977
      df_ref_record (DF_REF_REGULAR, collection_rec,
2978
                     dst, loc, bb, insn_info, DF_REF_REG_DEF, flags);
2979
 
2980
      /* We want to keep sp alive everywhere - by making all
2981
         writes to sp also use of sp. */
2982
      if (REGNO (dst) == STACK_POINTER_REGNUM)
2983
        df_ref_record (DF_REF_BASE, collection_rec,
2984
                       dst, NULL, bb, insn_info, DF_REF_REG_USE, flags);
2985
    }
2986
  else if (GET_CODE (dst) == SUBREG && REG_P (SUBREG_REG (dst)))
2987
    {
2988
      if (df_read_modify_subreg_p (dst))
2989
        flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL;
2990
 
2991
      flags |= DF_REF_SUBREG;
2992
 
2993
      df_ref_record (DF_REF_REGULAR, collection_rec,
2994
                     dst, loc, bb, insn_info, DF_REF_REG_DEF, flags);
2995
    }
2996
}
2997
 
2998
 
2999
/* Process all the registers defined in the pattern rtx, X.  */
3000
 
3001
static void
3002
df_defs_record (struct df_collection_rec *collection_rec,
3003
                rtx x, basic_block bb, struct df_insn_info *insn_info,
3004
                int flags)
3005
{
3006
  RTX_CODE code = GET_CODE (x);
3007
 
3008
  if (code == SET || code == CLOBBER)
3009
    {
3010
      /* Mark the single def within the pattern.  */
3011
      int clobber_flags = flags;
3012
      clobber_flags |= (code == CLOBBER) ? DF_REF_MUST_CLOBBER : 0;
3013
      df_def_record_1 (collection_rec, x, bb, insn_info, clobber_flags);
3014
    }
3015
  else if (code == COND_EXEC)
3016
    {
3017
      df_defs_record (collection_rec, COND_EXEC_CODE (x),
3018
                      bb, insn_info, DF_REF_CONDITIONAL);
3019
    }
3020
  else if (code == PARALLEL)
3021
    {
3022
      int i;
3023
 
3024
      /* Mark the multiple defs within the pattern.  */
3025
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
3026
        df_defs_record (collection_rec, XVECEXP (x, 0, i), bb, insn_info, flags);
3027
    }
3028
}
3029
 
3030
 
3031
/* Process all the registers used in the rtx at address LOC.  */
3032
 
3033
static void
3034
df_uses_record (struct df_collection_rec *collection_rec,
3035
                rtx *loc, enum df_ref_type ref_type,
3036
                basic_block bb, struct df_insn_info *insn_info,
3037
                int flags)
3038
{
3039
  RTX_CODE code;
3040
  rtx x;
3041
 
3042
 retry:
3043
  x = *loc;
3044
  if (!x)
3045
    return;
3046
  code = GET_CODE (x);
3047
  switch (code)
3048
    {
3049
    case LABEL_REF:
3050
    case SYMBOL_REF:
3051
    case CONST_INT:
3052
    case CONST:
3053
    case CONST_DOUBLE:
3054
    case CONST_FIXED:
3055
    case CONST_VECTOR:
3056
    case PC:
3057
    case CC0:
3058
    case ADDR_VEC:
3059
    case ADDR_DIFF_VEC:
3060
      return;
3061
 
3062
    case CLOBBER:
3063
      /* If we are clobbering a MEM, mark any registers inside the address
3064
         as being used.  */
3065
      if (MEM_P (XEXP (x, 0)))
3066
        df_uses_record (collection_rec,
3067
                        &XEXP (XEXP (x, 0), 0),
3068
                        DF_REF_REG_MEM_STORE,
3069
                        bb, insn_info,
3070
                        flags);
3071
 
3072
      /* If we're clobbering a REG then we have a def so ignore.  */
3073
      return;
3074
 
3075
    case MEM:
3076
      df_uses_record (collection_rec,
3077
                      &XEXP (x, 0), DF_REF_REG_MEM_LOAD,
3078
                      bb, insn_info, flags & DF_REF_IN_NOTE);
3079
      return;
3080
 
3081
    case SUBREG:
3082
      /* While we're here, optimize this case.  */
3083
      flags |= DF_REF_PARTIAL;
3084
      /* In case the SUBREG is not of a REG, do not optimize.  */
3085
      if (!REG_P (SUBREG_REG (x)))
3086
        {
3087
          loc = &SUBREG_REG (x);
3088
          df_uses_record (collection_rec, loc, ref_type, bb, insn_info, flags);
3089
          return;
3090
        }
3091
      /* ... Fall through ...  */
3092
 
3093
    case REG:
3094
      df_ref_record (DF_REF_REGULAR, collection_rec,
3095
                     x, loc, bb, insn_info,
3096
                     ref_type, flags);
3097
      return;
3098
 
3099
    case SIGN_EXTRACT:
3100
    case ZERO_EXTRACT:
3101
      {
3102
        df_uses_record (collection_rec,
3103
                        &XEXP (x, 1), ref_type, bb, insn_info, flags);
3104
        df_uses_record (collection_rec,
3105
                        &XEXP (x, 2), ref_type, bb, insn_info, flags);
3106
 
3107
        /* If the parameters to the zero or sign extract are
3108
           constants, strip them off and recurse, otherwise there is
3109
           no information that we can gain from this operation.  */
3110
        if (code == ZERO_EXTRACT)
3111
          flags |= DF_REF_ZERO_EXTRACT;
3112
        else
3113
          flags |= DF_REF_SIGN_EXTRACT;
3114
 
3115
        df_uses_record (collection_rec,
3116
                        &XEXP (x, 0), ref_type, bb, insn_info, flags);
3117
        return;
3118
      }
3119
      break;
3120
 
3121
    case SET:
3122
      {
3123
        rtx dst = SET_DEST (x);
3124
        gcc_assert (!(flags & DF_REF_IN_NOTE));
3125
        df_uses_record (collection_rec,
3126
                        &SET_SRC (x), DF_REF_REG_USE, bb, insn_info, flags);
3127
 
3128
        switch (GET_CODE (dst))
3129
          {
3130
            case SUBREG:
3131
              if (df_read_modify_subreg_p (dst))
3132
                {
3133
                  df_uses_record (collection_rec, &SUBREG_REG (dst),
3134
                                  DF_REF_REG_USE, bb, insn_info,
3135
                                  flags | DF_REF_READ_WRITE | DF_REF_SUBREG);
3136
                  break;
3137
                }
3138
              /* Fall through.  */
3139
            case REG:
3140
            case PARALLEL:
3141
            case SCRATCH:
3142
            case PC:
3143
            case CC0:
3144
                break;
3145
            case MEM:
3146
              df_uses_record (collection_rec, &XEXP (dst, 0),
3147
                              DF_REF_REG_MEM_STORE, bb, insn_info, flags);
3148
              break;
3149
            case STRICT_LOW_PART:
3150
              {
3151
                rtx *temp = &XEXP (dst, 0);
3152
                /* A strict_low_part uses the whole REG and not just the
3153
                 SUBREG.  */
3154
                dst = XEXP (dst, 0);
3155
                df_uses_record (collection_rec,
3156
                                (GET_CODE (dst) == SUBREG) ? &SUBREG_REG (dst) : temp,
3157
                                DF_REF_REG_USE, bb, insn_info,
3158
                                DF_REF_READ_WRITE | DF_REF_STRICT_LOW_PART);
3159
              }
3160
              break;
3161
            case ZERO_EXTRACT:
3162
              {
3163
                df_uses_record (collection_rec, &XEXP (dst, 1),
3164
                                DF_REF_REG_USE, bb, insn_info, flags);
3165
                df_uses_record (collection_rec, &XEXP (dst, 2),
3166
                                DF_REF_REG_USE, bb, insn_info, flags);
3167
                if (GET_CODE (XEXP (dst,0)) == MEM)
3168
                  df_uses_record (collection_rec, &XEXP (dst, 0),
3169
                                  DF_REF_REG_USE, bb, insn_info,
3170
                                  flags);
3171
                else
3172
                  df_uses_record (collection_rec, &XEXP (dst, 0),
3173
                                  DF_REF_REG_USE, bb, insn_info,
3174
                                  DF_REF_READ_WRITE | DF_REF_ZERO_EXTRACT);
3175
              }
3176
              break;
3177
 
3178
            default:
3179
              gcc_unreachable ();
3180
          }
3181
        return;
3182
      }
3183
 
3184
    case RETURN:
3185
    case SIMPLE_RETURN:
3186
      break;
3187
 
3188
    case ASM_OPERANDS:
3189
    case UNSPEC_VOLATILE:
3190
    case TRAP_IF:
3191
    case ASM_INPUT:
3192
      {
3193
        /* Traditional and volatile asm instructions must be
3194
           considered to use and clobber all hard registers, all
3195
           pseudo-registers and all of memory.  So must TRAP_IF and
3196
           UNSPEC_VOLATILE operations.
3197
 
3198
           Consider for instance a volatile asm that changes the fpu
3199
           rounding mode.  An insn should not be moved across this
3200
           even if it only uses pseudo-regs because it might give an
3201
           incorrectly rounded result.
3202
 
3203
           However, flow.c's liveness computation did *not* do this,
3204
           giving the reasoning as " ?!? Unfortunately, marking all
3205
           hard registers as live causes massive problems for the
3206
           register allocator and marking all pseudos as live creates
3207
           mountains of uninitialized variable warnings."
3208
 
3209
           In order to maintain the status quo with regard to liveness
3210
           and uses, we do what flow.c did and just mark any regs we
3211
           can find in ASM_OPERANDS as used.  In global asm insns are
3212
           scanned and regs_asm_clobbered is filled out.
3213
 
3214
           For all ASM_OPERANDS, we must traverse the vector of input
3215
           operands.  We can not just fall through here since then we
3216
           would be confused by the ASM_INPUT rtx inside ASM_OPERANDS,
3217
           which do not indicate traditional asms unlike their normal
3218
           usage.  */
3219
        if (code == ASM_OPERANDS)
3220
          {
3221
            int j;
3222
 
3223
            for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
3224
              df_uses_record (collection_rec, &ASM_OPERANDS_INPUT (x, j),
3225
                              DF_REF_REG_USE, bb, insn_info, flags);
3226
            return;
3227
          }
3228
        break;
3229
      }
3230
 
3231
    case VAR_LOCATION:
3232
      df_uses_record (collection_rec,
3233
                      &PAT_VAR_LOCATION_LOC (x),
3234
                      DF_REF_REG_USE, bb, insn_info, flags);
3235
      return;
3236
 
3237
    case PRE_DEC:
3238
    case POST_DEC:
3239
    case PRE_INC:
3240
    case POST_INC:
3241
    case PRE_MODIFY:
3242
    case POST_MODIFY:
3243
      gcc_assert (!DEBUG_INSN_P (insn_info->insn));
3244
      /* Catch the def of the register being modified.  */
3245
      df_ref_record (DF_REF_REGULAR, collection_rec, XEXP (x, 0), &XEXP (x, 0),
3246
                     bb, insn_info,
3247
                     DF_REF_REG_DEF,
3248
                     flags | DF_REF_READ_WRITE | DF_REF_PRE_POST_MODIFY);
3249
 
3250
      /* ... Fall through to handle uses ...  */
3251
 
3252
    default:
3253
      break;
3254
    }
3255
 
3256
  /* Recursively scan the operands of this expression.  */
3257
  {
3258
    const char *fmt = GET_RTX_FORMAT (code);
3259
    int i;
3260
 
3261
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3262
      {
3263
        if (fmt[i] == 'e')
3264
          {
3265
            /* Tail recursive case: save a function call level.  */
3266
            if (i == 0)
3267
              {
3268
                loc = &XEXP (x, 0);
3269
                goto retry;
3270
              }
3271
            df_uses_record (collection_rec, &XEXP (x, i), ref_type,
3272
                            bb, insn_info, flags);
3273
          }
3274
        else if (fmt[i] == 'E')
3275
          {
3276
            int j;
3277
            for (j = 0; j < XVECLEN (x, i); j++)
3278
              df_uses_record (collection_rec,
3279
                              &XVECEXP (x, i, j), ref_type,
3280
                              bb, insn_info, flags);
3281
          }
3282
      }
3283
  }
3284
 
3285
  return;
3286
}
3287
 
3288
 
3289
/* For all DF_REF_CONDITIONAL defs, add a corresponding uses.  */
3290
 
3291
static void
3292
df_get_conditional_uses (struct df_collection_rec *collection_rec)
3293
{
3294
  unsigned int ix;
3295
  df_ref ref;
3296
 
3297
  FOR_EACH_VEC_ELT (df_ref, collection_rec->def_vec, ix, ref)
3298
    {
3299
      if (DF_REF_FLAGS_IS_SET (ref, DF_REF_CONDITIONAL))
3300
        {
3301
          df_ref use;
3302
 
3303
          use = df_ref_create_structure (DF_REF_CLASS (ref), collection_rec, DF_REF_REG (ref),
3304
                                         DF_REF_LOC (ref), DF_REF_BB (ref),
3305
                                         DF_REF_INSN_INFO (ref), DF_REF_REG_USE,
3306
                                         DF_REF_FLAGS (ref) & ~DF_REF_CONDITIONAL);
3307
          DF_REF_REGNO (use) = DF_REF_REGNO (ref);
3308
        }
3309
    }
3310
}
3311
 
3312
 
3313
/* Get call's extra defs and uses. */
3314
 
3315
static void
3316
df_get_call_refs (struct df_collection_rec * collection_rec,
3317
                  basic_block bb,
3318
                  struct df_insn_info *insn_info,
3319
                  int flags)
3320
{
3321
  rtx note;
3322
  bitmap_iterator bi;
3323
  unsigned int ui;
3324
  bool is_sibling_call;
3325
  unsigned int i;
3326
  df_ref def;
3327
  bitmap_head defs_generated;
3328
 
3329
  bitmap_initialize (&defs_generated, &df_bitmap_obstack);
3330
 
3331
  /* Do not generate clobbers for registers that are the result of the
3332
     call.  This causes ordering problems in the chain building code
3333
     depending on which def is seen first.  */
3334
  FOR_EACH_VEC_ELT (df_ref, collection_rec->def_vec, i, def)
3335
    bitmap_set_bit (&defs_generated, DF_REF_REGNO (def));
3336
 
3337
  /* Record the registers used to pass arguments, and explicitly
3338
     noted as clobbered.  */
3339
  for (note = CALL_INSN_FUNCTION_USAGE (insn_info->insn); note;
3340
       note = XEXP (note, 1))
3341
    {
3342
      if (GET_CODE (XEXP (note, 0)) == USE)
3343
        df_uses_record (collection_rec, &XEXP (XEXP (note, 0), 0),
3344
                        DF_REF_REG_USE, bb, insn_info, flags);
3345
      else if (GET_CODE (XEXP (note, 0)) == CLOBBER)
3346
        {
3347
          if (REG_P (XEXP (XEXP (note, 0), 0)))
3348
            {
3349
              unsigned int regno = REGNO (XEXP (XEXP (note, 0), 0));
3350
              if (!bitmap_bit_p (&defs_generated, regno))
3351
                df_defs_record (collection_rec, XEXP (note, 0), bb,
3352
                                insn_info, flags);
3353
            }
3354
          else
3355
            df_uses_record (collection_rec, &XEXP (note, 0),
3356
                            DF_REF_REG_USE, bb, insn_info, flags);
3357
        }
3358
    }
3359
 
3360
  /* The stack ptr is used (honorarily) by a CALL insn.  */
3361
  df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[STACK_POINTER_REGNUM],
3362
                 NULL, bb, insn_info, DF_REF_REG_USE,
3363
                 DF_REF_CALL_STACK_USAGE | flags);
3364
 
3365
  /* Calls to const functions cannot access any global registers and calls to
3366
     pure functions cannot set them.  All other calls may reference any of the
3367
     global registers, so they are recorded as used.  */
3368
  if (!RTL_CONST_CALL_P (insn_info->insn))
3369
    for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3370
      if (global_regs[i])
3371
        {
3372
          df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
3373
                         NULL, bb, insn_info, DF_REF_REG_USE, flags);
3374
          if (!RTL_PURE_CALL_P (insn_info->insn))
3375
            df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
3376
                           NULL, bb, insn_info, DF_REF_REG_DEF, flags);
3377
        }
3378
 
3379
  is_sibling_call = SIBLING_CALL_P (insn_info->insn);
3380
  EXECUTE_IF_SET_IN_BITMAP (regs_invalidated_by_call_regset, 0, ui, bi)
3381
    {
3382
      if (!global_regs[ui]
3383
          && (!bitmap_bit_p (&defs_generated, ui))
3384
          && (!is_sibling_call
3385
              || !bitmap_bit_p (df->exit_block_uses, ui)
3386
              || refers_to_regno_p (ui, ui+1,
3387
                                    crtl->return_rtx, NULL)))
3388
        df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[ui],
3389
                       NULL, bb, insn_info, DF_REF_REG_DEF,
3390
                       DF_REF_MAY_CLOBBER | flags);
3391
    }
3392
 
3393
  bitmap_clear (&defs_generated);
3394
  return;
3395
}
3396
 
3397
/* Collect all refs in the INSN. This function is free of any
3398
   side-effect - it will create and return a lists of df_ref's in the
3399
   COLLECTION_REC without putting those refs into existing ref chains
3400
   and reg chains. */
3401
 
3402
static void
3403
df_insn_refs_collect (struct df_collection_rec* collection_rec,
3404
                      basic_block bb, struct df_insn_info *insn_info)
3405
{
3406
  rtx note;
3407
  bool is_cond_exec = (GET_CODE (PATTERN (insn_info->insn)) == COND_EXEC);
3408
 
3409
  /* Clear out the collection record.  */
3410
  VEC_truncate (df_ref, collection_rec->def_vec, 0);
3411
  VEC_truncate (df_ref, collection_rec->use_vec, 0);
3412
  VEC_truncate (df_ref, collection_rec->eq_use_vec, 0);
3413
  VEC_truncate (df_mw_hardreg_ptr, collection_rec->mw_vec, 0);
3414
 
3415
  /* Record register defs.  */
3416
  df_defs_record (collection_rec, PATTERN (insn_info->insn), bb, insn_info, 0);
3417
 
3418
  /* Process REG_EQUIV/REG_EQUAL notes.  */
3419
  for (note = REG_NOTES (insn_info->insn); note;
3420
       note = XEXP (note, 1))
3421
    {
3422
      switch (REG_NOTE_KIND (note))
3423
        {
3424
        case REG_EQUIV:
3425
        case REG_EQUAL:
3426
          df_uses_record (collection_rec,
3427
                          &XEXP (note, 0), DF_REF_REG_USE,
3428
                          bb, insn_info, DF_REF_IN_NOTE);
3429
          break;
3430
        case REG_NON_LOCAL_GOTO:
3431
          /* The frame ptr is used by a non-local goto.  */
3432
          df_ref_record (DF_REF_BASE, collection_rec,
3433
                         regno_reg_rtx[FRAME_POINTER_REGNUM],
3434
                         NULL, bb, insn_info,
3435
                         DF_REF_REG_USE, 0);
3436
#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
3437
          df_ref_record (DF_REF_BASE, collection_rec,
3438
                         regno_reg_rtx[HARD_FRAME_POINTER_REGNUM],
3439
                         NULL, bb, insn_info,
3440
                         DF_REF_REG_USE, 0);
3441
#endif
3442
          break;
3443
        default:
3444
          break;
3445
        }
3446
    }
3447
 
3448
  if (CALL_P (insn_info->insn))
3449
    df_get_call_refs (collection_rec, bb, insn_info,
3450
                      (is_cond_exec) ? DF_REF_CONDITIONAL : 0);
3451
 
3452
  /* Record the register uses.  */
3453
  df_uses_record (collection_rec,
3454
                  &PATTERN (insn_info->insn), DF_REF_REG_USE, bb, insn_info, 0);
3455
 
3456
  /* DF_REF_CONDITIONAL needs corresponding USES. */
3457
  if (is_cond_exec)
3458
    df_get_conditional_uses (collection_rec);
3459
 
3460
  df_canonize_collection_rec (collection_rec);
3461
}
3462
 
3463
/* Recompute the luids for the insns in BB.  */
3464
 
3465
void
3466
df_recompute_luids (basic_block bb)
3467
{
3468
  rtx insn;
3469
  int luid = 0;
3470
 
3471
  df_grow_insn_info ();
3472
 
3473
  /* Scan the block an insn at a time from beginning to end.  */
3474
  FOR_BB_INSNS (bb, insn)
3475
    {
3476
      struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
3477
      /* Inserting labels does not always trigger the incremental
3478
         rescanning.  */
3479
      if (!insn_info)
3480
        {
3481
          gcc_assert (!INSN_P (insn));
3482
          insn_info = df_insn_create_insn_record (insn);
3483
        }
3484
 
3485
      DF_INSN_INFO_LUID (insn_info) = luid;
3486
      if (INSN_P (insn))
3487
        luid++;
3488
    }
3489
}
3490
 
3491
 
3492
/* Collect all artificial refs at the block level for BB and add them
3493
   to COLLECTION_REC.  */
3494
 
3495
static void
3496
df_bb_refs_collect (struct df_collection_rec *collection_rec, basic_block bb)
3497
{
3498
  VEC_truncate (df_ref, collection_rec->def_vec, 0);
3499
  VEC_truncate (df_ref, collection_rec->use_vec, 0);
3500
  VEC_truncate (df_ref, collection_rec->eq_use_vec, 0);
3501
  VEC_truncate (df_mw_hardreg_ptr, collection_rec->mw_vec, 0);
3502
 
3503
  if (bb->index == ENTRY_BLOCK)
3504
    {
3505
      df_entry_block_defs_collect (collection_rec, df->entry_block_defs);
3506
      return;
3507
    }
3508
  else if (bb->index == EXIT_BLOCK)
3509
    {
3510
      df_exit_block_uses_collect (collection_rec, df->exit_block_uses);
3511
      return;
3512
    }
3513
 
3514
#ifdef EH_RETURN_DATA_REGNO
3515
  if (bb_has_eh_pred (bb))
3516
    {
3517
      unsigned int i;
3518
      /* Mark the registers that will contain data for the handler.  */
3519
      for (i = 0; ; ++i)
3520
        {
3521
          unsigned regno = EH_RETURN_DATA_REGNO (i);
3522
          if (regno == INVALID_REGNUM)
3523
            break;
3524
          df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[regno], NULL,
3525
                         bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP);
3526
        }
3527
    }
3528
#endif
3529
 
3530
  /* Add the hard_frame_pointer if this block is the target of a
3531
     non-local goto.  */
3532
  if (bb->flags & BB_NON_LOCAL_GOTO_TARGET)
3533
    df_ref_record (DF_REF_ARTIFICIAL, collection_rec, hard_frame_pointer_rtx, NULL,
3534
                   bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP);
3535
 
3536
  /* Add the artificial uses.  */
3537
  if (bb->index >= NUM_FIXED_BLOCKS)
3538
    {
3539
      bitmap_iterator bi;
3540
      unsigned int regno;
3541
      bitmap au = bb_has_eh_pred (bb)
3542
        ? &df->eh_block_artificial_uses
3543
        : &df->regular_block_artificial_uses;
3544
 
3545
      EXECUTE_IF_SET_IN_BITMAP (au, 0, regno, bi)
3546
        {
3547
          df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[regno], NULL,
3548
                         bb, NULL, DF_REF_REG_USE, 0);
3549
        }
3550
    }
3551
 
3552
  df_canonize_collection_rec (collection_rec);
3553
}
3554
 
3555
 
3556
/* Record all the refs within the basic block BB_INDEX and scan the instructions if SCAN_INSNS.  */
3557
 
3558
void
3559
df_bb_refs_record (int bb_index, bool scan_insns)
3560
{
3561
  basic_block bb = BASIC_BLOCK (bb_index);
3562
  rtx insn;
3563
  int luid = 0;
3564
  struct df_collection_rec collection_rec;
3565
 
3566
  if (!df)
3567
    return;
3568
 
3569
  df_grow_bb_info (df_scan);
3570
  collection_rec.def_vec = VEC_alloc (df_ref, stack, 128);
3571
  collection_rec.use_vec = VEC_alloc (df_ref, stack, 32);
3572
  collection_rec.eq_use_vec = VEC_alloc (df_ref, stack, 32);
3573
  collection_rec.mw_vec = VEC_alloc (df_mw_hardreg_ptr, stack, 32);
3574
 
3575
  if (scan_insns)
3576
    /* Scan the block an insn at a time from beginning to end.  */
3577
    FOR_BB_INSNS (bb, insn)
3578
      {
3579
        struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
3580
        gcc_assert (!insn_info);
3581
 
3582
        insn_info = df_insn_create_insn_record (insn);
3583
        if (INSN_P (insn))
3584
          {
3585
            /* Record refs within INSN.  */
3586
            DF_INSN_INFO_LUID (insn_info) = luid++;
3587
            df_insn_refs_collect (&collection_rec, bb, DF_INSN_INFO_GET (insn));
3588
            df_refs_add_to_chains (&collection_rec, bb, insn);
3589
          }
3590
        DF_INSN_INFO_LUID (insn_info) = luid;
3591
      }
3592
 
3593
  /* Other block level artificial refs */
3594
  df_bb_refs_collect (&collection_rec, bb);
3595
  df_refs_add_to_chains (&collection_rec, bb, NULL);
3596
 
3597
  VEC_free (df_ref, stack, collection_rec.def_vec);
3598
  VEC_free (df_ref, stack, collection_rec.use_vec);
3599
  VEC_free (df_ref, stack, collection_rec.eq_use_vec);
3600
  VEC_free (df_mw_hardreg_ptr, stack, collection_rec.mw_vec);
3601
 
3602
  /* Now that the block has been processed, set the block as dirty so
3603
     LR and LIVE will get it processed.  */
3604
  df_set_bb_dirty (bb);
3605
}
3606
 
3607
 
3608
/* Get the artificial use set for a regular (i.e. non-exit/non-entry)
   block.  The result is accumulated into REGULAR_BLOCK_ARTIFICIAL_USES,
   which is cleared first.  */

static void
df_get_regular_block_artificial_uses (bitmap regular_block_artificial_uses)
{
#ifdef EH_USES
  unsigned int i;
#endif

  bitmap_clear (regular_block_artificial_uses);

  if (reload_completed)
    {
      /* After reload, only the hard frame pointer (if still needed)
         must be forced live in every block.  */
      if (frame_pointer_needed)
        bitmap_set_bit (regular_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
    }
  else
    /* Before reload, there are a few registers that must be forced
       live everywhere -- which might not already be the case for
       blocks within infinite loops.  */
    {
      unsigned int picreg = PIC_OFFSET_TABLE_REGNUM;

      /* Any reference to any pseudo before reload is a potential
         reference of the frame pointer.  */
      bitmap_set_bit (regular_block_artificial_uses, FRAME_POINTER_REGNUM);

#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
      bitmap_set_bit (regular_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
#endif

#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      /* Pseudos with argument area equivalences may require
         reloading via the argument pointer.  */
      if (fixed_regs[ARG_POINTER_REGNUM])
        bitmap_set_bit (regular_block_artificial_uses, ARG_POINTER_REGNUM);
#endif

      /* Any constant, or pseudo with constant equivalences, may
         require reloading from memory using the pic register.  */
      if (picreg != INVALID_REGNUM
          && fixed_regs[picreg])
        bitmap_set_bit (regular_block_artificial_uses, picreg);
    }
  /* The all-important stack pointer must always be live.  */
  bitmap_set_bit (regular_block_artificial_uses, STACK_POINTER_REGNUM);

#ifdef EH_USES
  /* EH_USES registers are used:
     1) at all insns that might throw (calls or with -fnon-call-exceptions
        trapping insns)
     2) in all EH edges
     3) to support backtraces and/or debugging, anywhere between their
        initialization and where the saved registers are restored
        from them, including the cases where we don't reach the epilogue
        (noreturn call or infinite loop).  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (EH_USES (i))
      bitmap_set_bit (regular_block_artificial_uses, i);
#endif
}
3670
 
3671
 
3672
/* Get the artificial use set for an eh block.  The result is
   accumulated into EH_BLOCK_ARTIFICIAL_USES, which is cleared first.  */

static void
df_get_eh_block_artificial_uses (bitmap eh_block_artificial_uses)
{
  bitmap_clear (eh_block_artificial_uses);

  /* The following code (down thru the arg_pointer setting) APPEARS
     to be necessary because there is nothing that actually
     describes what the exception handling code may actually need
     to keep alive.  */
  if (reload_completed)
    {
      if (frame_pointer_needed)
        {
          bitmap_set_bit (eh_block_artificial_uses, FRAME_POINTER_REGNUM);
#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
          /* Also keep the hard frame pointer when it is distinct from
             the soft frame pointer.  */
          bitmap_set_bit (eh_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
#endif
        }
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      /* A fixed argument pointer may be needed to reach spill slots of
         pseudos with argument-area equivalences.  */
      if (fixed_regs[ARG_POINTER_REGNUM])
        bitmap_set_bit (eh_block_artificial_uses, ARG_POINTER_REGNUM);
#endif
    }
}
3698
 
3699
 
3700
 
3701
/*----------------------------------------------------------------------------
3702
   Specialized hard register scanning functions.
3703
----------------------------------------------------------------------------*/
3704
 
3705
 
3706
/* Mark a register in SET.  Hard registers in large modes get all
3707
   of their component registers set as well.  */
3708
 
3709
static void
3710
df_mark_reg (rtx reg, void *vset)
3711
{
3712
  bitmap set = (bitmap) vset;
3713
  int regno = REGNO (reg);
3714
 
3715
  gcc_assert (GET_MODE (reg) != BLKmode);
3716
 
3717
  if (regno < FIRST_PSEUDO_REGISTER)
3718
    {
3719
      int n = hard_regno_nregs[regno][GET_MODE (reg)];
3720
      bitmap_set_range (set, regno, n);
3721
    }
3722
  else
3723
    bitmap_set_bit (set, regno);
3724
}
3725
 
3726
 
3727
/* Set the bit for regs that are considered being defined at the entry.
   The result is accumulated into ENTRY_BLOCK_DEFS, which is cleared
   first.  */

static void
df_get_entry_block_def_set (bitmap entry_block_defs)
{
  rtx r;
  int i;

  bitmap_clear (entry_block_defs);

  /* Incoming argument registers are defined on entry; INCOMING_REGNO
     maps an outgoing argument register to its incoming counterpart.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (FUNCTION_ARG_REGNO_P (i))
      bitmap_set_bit (entry_block_defs, INCOMING_REGNO (i));

  /* The always important stack pointer.  */
  bitmap_set_bit (entry_block_defs, STACK_POINTER_REGNUM);

  /* Once the prologue has been generated, all of these registers
     should just show up in the first regular block.  */
  if (HAVE_prologue && epilogue_completed)
    {
      /* Defs for the callee saved registers are inserted so that the
         pushes have some defining location.  */
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
        if ((call_used_regs[i] == 0) && (df_regs_ever_live_p (i)))
          bitmap_set_bit (entry_block_defs, i);
    }

  /* The register holding the address of a returned aggregate, if any.  */
  r = targetm.calls.struct_value_rtx (current_function_decl, true);
  if (r && REG_P (r))
    bitmap_set_bit (entry_block_defs, REGNO (r));

  /* If the function has an incoming STATIC_CHAIN, it has to show up
     in the entry def set.  */
  r = targetm.calls.static_chain (current_function_decl, true);
  if (r && REG_P (r))
    bitmap_set_bit (entry_block_defs, REGNO (r));

  if ((!reload_completed) || frame_pointer_needed)
    {
      /* Any reference to any pseudo before reload is a potential
         reference of the frame pointer.  */
      bitmap_set_bit (entry_block_defs, FRAME_POINTER_REGNUM);
#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
      /* If they are different, also mark the hard frame pointer as live.  */
      if (!LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
        bitmap_set_bit (entry_block_defs, HARD_FRAME_POINTER_REGNUM);
#endif
    }

  /* These registers are live everywhere.  */
  if (!reload_completed)
    {
#ifdef PIC_OFFSET_TABLE_REGNUM
      unsigned int picreg = PIC_OFFSET_TABLE_REGNUM;
#endif

#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      /* Pseudos with argument area equivalences may require
         reloading via the argument pointer.  */
      if (fixed_regs[ARG_POINTER_REGNUM])
        bitmap_set_bit (entry_block_defs, ARG_POINTER_REGNUM);
#endif

#ifdef PIC_OFFSET_TABLE_REGNUM
      /* Any constant, or pseudo with constant equivalences, may
         require reloading from memory using the pic register.  */
      if (picreg != INVALID_REGNUM
          && fixed_regs[picreg])
        bitmap_set_bit (entry_block_defs, picreg);
#endif
    }

#ifdef INCOMING_RETURN_ADDR_RTX
  /* The incoming return address, when held in a register, is defined
     on entry.  */
  if (REG_P (INCOMING_RETURN_ADDR_RTX))
    bitmap_set_bit (entry_block_defs, REGNO (INCOMING_RETURN_ADDR_RTX));
#endif

  /* Let the target add any additional registers live on entry.  */
  targetm.extra_live_on_entry (entry_block_defs);
}
3807
 
3808
 
3809
/* Return the (conservative) set of hard registers that are defined on
3810
   entry to the function.
3811
   It uses df->entry_block_defs to determine which register
3812
   reference to include.  */
3813
 
3814
static void
3815
df_entry_block_defs_collect (struct df_collection_rec *collection_rec,
3816
                             bitmap entry_block_defs)
3817
{
3818
  unsigned int i;
3819
  bitmap_iterator bi;
3820
 
3821
  EXECUTE_IF_SET_IN_BITMAP (entry_block_defs, 0, i, bi)
3822
    {
3823
      df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[i], NULL,
3824
                     ENTRY_BLOCK_PTR, NULL, DF_REF_REG_DEF, 0);
3825
    }
3826
 
3827
  df_canonize_collection_rec (collection_rec);
3828
}
3829
 
3830
 
3831
/* Record the (conservative) set of hard registers that are defined on
3832
   entry to the function.  */
3833
 
3834
static void
3835
df_record_entry_block_defs (bitmap entry_block_defs)
3836
{
3837
  struct df_collection_rec collection_rec;
3838
  memset (&collection_rec, 0, sizeof (struct df_collection_rec));
3839
  collection_rec.def_vec = VEC_alloc (df_ref, stack, FIRST_PSEUDO_REGISTER);
3840
  df_entry_block_defs_collect (&collection_rec, entry_block_defs);
3841
 
3842
  /* Process bb_refs chain */
3843
  df_refs_add_to_chains (&collection_rec, BASIC_BLOCK (ENTRY_BLOCK), NULL);
3844
  VEC_free (df_ref, stack, collection_rec.def_vec);
3845
}
3846
 
3847
 
3848
/* Update the defs in the entry block.  Recomputes the entry def set
   and, if it differs from the cached df->entry_block_defs, rebuilds
   the entry block's artificial def chains and marks the block dirty.  */

void
df_update_entry_block_defs (void)
{
  bitmap_head refs;
  bool changed = false;

  bitmap_initialize (&refs, &df_bitmap_obstack);
  df_get_entry_block_def_set (&refs);
  if (df->entry_block_defs)
    {
      if (!bitmap_equal_p (df->entry_block_defs, &refs))
        {
          /* The set changed: delete the stale artificial defs; they
             are rebuilt below.  */
          struct df_scan_bb_info *bb_info = df_scan_get_bb_info (ENTRY_BLOCK);
          df_ref_chain_delete_du_chain (bb_info->artificial_defs);
          df_ref_chain_delete (bb_info->artificial_defs);
          bb_info->artificial_defs = NULL;
          changed = true;
        }
    }
  else
    {
      /* NOTE(review): df->entry_block_defs is expected to always be
         allocated by the time this runs, so this branch asserts; the
         code after gcc_unreachable is dead.  */
      struct df_scan_problem_data *problem_data
        = (struct df_scan_problem_data *) df_scan->problem_data;
        gcc_unreachable ();
      df->entry_block_defs = BITMAP_ALLOC (&problem_data->reg_bitmaps);
      changed = true;
    }

  if (changed)
    {
      df_record_entry_block_defs (&refs);
      bitmap_copy (df->entry_block_defs, &refs);
      df_set_bb_dirty (BASIC_BLOCK (ENTRY_BLOCK));
    }
  bitmap_clear (&refs);
}
3886
 
3887
 
3888
/* Set the bit for regs that are considered being used at the exit.
   The result is accumulated into EXIT_BLOCK_USES, which is cleared
   first.  */

static void
df_get_exit_block_use_set (bitmap exit_block_uses)
{
  unsigned int i;
  unsigned int picreg = PIC_OFFSET_TABLE_REGNUM;

  bitmap_clear (exit_block_uses);

  /* Stack pointer is always live at the exit.  */
  bitmap_set_bit (exit_block_uses, STACK_POINTER_REGNUM);

  /* Mark the frame pointer if needed at the end of the function.
     If we end up eliminating it, it will be removed from the live
     list of each basic block by reload.  */

  if ((!reload_completed) || frame_pointer_needed)
    {
      bitmap_set_bit (exit_block_uses, FRAME_POINTER_REGNUM);
#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
      /* If they are different, also mark the hard frame pointer as live.  */
      if (!LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
        bitmap_set_bit (exit_block_uses, HARD_FRAME_POINTER_REGNUM);
#endif
    }

  /* Many architectures have a GP register even without flag_pic.
     Assume the pic register is not in use, or will be handled by
     other means, if it is not fixed.  */
  if (!PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
      && picreg != INVALID_REGNUM
      && fixed_regs[picreg])
    bitmap_set_bit (exit_block_uses, picreg);

  /* Mark all global registers, and all registers used by the
     epilogue as being live at the end of the function since they
     may be referenced by our caller.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (global_regs[i] || EPILOGUE_USES (i))
      bitmap_set_bit (exit_block_uses, i);

  if (HAVE_epilogue && epilogue_completed)
    {
      /* Mark all call-saved registers that we actually used.  */
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
        if (df_regs_ever_live_p (i) && !LOCAL_REGNO (i)
            && !TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
          bitmap_set_bit (exit_block_uses, i);
    }

#ifdef EH_RETURN_DATA_REGNO
  /* Mark the registers that will contain data for the handler.  */
  if (reload_completed && crtl->calls_eh_return)
    for (i = 0; ; ++i)
      {
        unsigned regno = EH_RETURN_DATA_REGNO (i);
        if (regno == INVALID_REGNUM)
          break;
        bitmap_set_bit (exit_block_uses, regno);
      }
#endif

#ifdef EH_RETURN_STACKADJ_RTX
  /* Before the epilogue exists, the eh-return stack adjustment
     register must be treated as used at exit.  */
  if ((!HAVE_epilogue || ! epilogue_completed)
      && crtl->calls_eh_return)
    {
      rtx tmp = EH_RETURN_STACKADJ_RTX;
      if (tmp && REG_P (tmp))
        df_mark_reg (tmp, exit_block_uses);
    }
#endif

#ifdef EH_RETURN_HANDLER_RTX
  /* Likewise for the eh-return handler address register.  */
  if ((!HAVE_epilogue || ! epilogue_completed)
      && crtl->calls_eh_return)
    {
      rtx tmp = EH_RETURN_HANDLER_RTX;
      if (tmp && REG_P (tmp))
        df_mark_reg (tmp, exit_block_uses);
    }
#endif

  /* Mark function return value.  */
  diddle_return_value (df_mark_reg, (void*) exit_block_uses);
}
3974
 
3975
 
3976
/* Return the refs of hard registers that are used in the exit block.
   It uses df->exit_block_uses to determine register to include.
   Builds one artificial DF_REF_REG_USE per register in
   EXIT_BLOCK_USES, attached to the exit block, in COLLECTION_REC.  */

static void
df_exit_block_uses_collect (struct df_collection_rec *collection_rec, bitmap exit_block_uses)
{
  unsigned int i;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (exit_block_uses, 0, i, bi)
    df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[i], NULL,
                   EXIT_BLOCK_PTR, NULL, DF_REF_REG_USE, 0);

#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
  /* It is deliberate that this is not put in the exit block uses but
     I do not know why.  */
  if (reload_completed
      && !bitmap_bit_p (exit_block_uses, ARG_POINTER_REGNUM)
      && bb_has_eh_pred (EXIT_BLOCK_PTR)
      && fixed_regs[ARG_POINTER_REGNUM])
    df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[ARG_POINTER_REGNUM], NULL,
                   EXIT_BLOCK_PTR, NULL, DF_REF_REG_USE, 0);
#endif

  df_canonize_collection_rec (collection_rec);
}
4002
 
4003
 
4004
/* Record the set of hard registers that are used in the exit block.
4005
   It uses df->exit_block_uses to determine which bit to include.  */
4006
 
4007
static void
4008
df_record_exit_block_uses (bitmap exit_block_uses)
4009
{
4010
  struct df_collection_rec collection_rec;
4011
  memset (&collection_rec, 0, sizeof (struct df_collection_rec));
4012
  collection_rec.use_vec = VEC_alloc (df_ref, stack, FIRST_PSEUDO_REGISTER);
4013
 
4014
  df_exit_block_uses_collect (&collection_rec, exit_block_uses);
4015
 
4016
  /* Process bb_refs chain */
4017
  df_refs_add_to_chains (&collection_rec, BASIC_BLOCK (EXIT_BLOCK), NULL);
4018
  VEC_free (df_ref, stack, collection_rec.use_vec);
4019
}
4020
 
4021
 
4022
/* Update the uses in the exit block.  */
4023
 
4024
void
4025
df_update_exit_block_uses (void)
4026
{
4027
  bitmap_head refs;
4028
  bool changed = false;
4029
 
4030
  bitmap_initialize (&refs, &df_bitmap_obstack);
4031
  df_get_exit_block_use_set (&refs);
4032
  if (df->exit_block_uses)
4033
    {
4034
      if (!bitmap_equal_p (df->exit_block_uses, &refs))
4035
        {
4036
          struct df_scan_bb_info *bb_info = df_scan_get_bb_info (EXIT_BLOCK);
4037
          df_ref_chain_delete_du_chain (bb_info->artificial_uses);
4038
          df_ref_chain_delete (bb_info->artificial_uses);
4039
          bb_info->artificial_uses = NULL;
4040
          changed = true;
4041
        }
4042
    }
4043
  else
4044
    {
4045
      struct df_scan_problem_data *problem_data
4046
        = (struct df_scan_problem_data *) df_scan->problem_data;
4047
        gcc_unreachable ();
4048
      df->exit_block_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
4049
      changed = true;
4050
    }
4051
 
4052
  if (changed)
4053
    {
4054
      df_record_exit_block_uses (&refs);
4055
      bitmap_copy (df->exit_block_uses,& refs);
4056
      df_set_bb_dirty (BASIC_BLOCK (EXIT_BLOCK));
4057
    }
4058
  bitmap_clear (&refs);
4059
}
4060
 
4061
/* One-shot guard for df_hard_reg_init.  */
static bool initialized = false;


/* Initialize some platform specific structures.  Computes
   elim_reg_set, the set of registers subject to elimination;
   idempotent after the first call.  */

void
df_hard_reg_init (void)
{
#ifdef ELIMINABLE_REGS
  int i;
  static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
#endif
  if (initialized)
    return;

  /* Record which registers will be eliminated.  We use this in
     mark_used_regs.  */
  CLEAR_HARD_REG_SET (elim_reg_set);

#ifdef ELIMINABLE_REGS
  for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
    SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
#else
  /* Without ELIMINABLE_REGS only the frame pointer is eliminable.  */
  SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
#endif

  initialized = true;
}
4089
 
4090
 
4091
/* Recompute the parts of scanning that are based on regs_ever_live
4092
   because something changed in that array.  */
4093
 
4094
void
4095
df_update_entry_exit_and_calls (void)
4096
{
4097
  basic_block bb;
4098
 
4099
  df_update_entry_block_defs ();
4100
  df_update_exit_block_uses ();
4101
 
4102
  /* The call insns need to be rescanned because there may be changes
4103
     in the set of registers clobbered across the call.  */
4104
  FOR_EACH_BB (bb)
4105
    {
4106
      rtx insn;
4107
      FOR_BB_INSNS (bb, insn)
4108
        {
4109
          if (INSN_P (insn) && CALL_P (insn))
4110
            df_insn_rescan (insn);
4111
        }
4112
    }
4113
}
4114
 
4115
 
4116
/* Return true if hard REG is actually used in the some instruction.
   There are a fair number of conditions that affect the setting of
   this array.  See the comment in df.h for df->hard_regs_live_count
   for the conditions that this array is set. */

bool
df_hard_reg_used_p (unsigned int reg)
{
  return df->hard_regs_live_count[reg] != 0;
}
4126
 
4127
 
4128
/* A count of the number of times REG is actually used in the some
   instruction.  There are a fair number of conditions that affect the
   setting of this array.  See the comment in df.h for
   df->hard_regs_live_count for the conditions that this array is
   set. */


unsigned int
df_hard_reg_used_count (unsigned int reg)
{
  return df->hard_regs_live_count[reg];
}
4140
 
4141
 
4142
/* Get the value of regs_ever_live[REGNO].  Accessor kept so all
   reads of the array funnel through df.  */

bool
df_regs_ever_live_p (unsigned int regno)
{
  return regs_ever_live[regno];
}
4149
 
4150
 
4151
/* Set regs_ever_live[REGNO] to VALUE.  If this cause regs_ever_live
4152
   to change, schedule that change for the next update.  */
4153
 
4154
void
4155
df_set_regs_ever_live (unsigned int regno, bool value)
4156
{
4157
  if (regs_ever_live[regno] == value)
4158
    return;
4159
 
4160
  regs_ever_live[regno] = value;
4161
  if (df)
4162
    df->redo_entry_and_exit = true;
4163
}
4164
 
4165
 
4166
/* Compute "regs_ever_live" information from the underlying df
   information.  Set the vector to all false if RESET.  If anything
   changed (or a change was pending in df->redo_entry_and_exit),
   refresh the entry/exit blocks and rescan call insns.  */

void
df_compute_regs_ever_live (bool reset)
{
  unsigned int i;
  /* A previously deferred change also forces the update below.  */
  bool changed = df->redo_entry_and_exit;

  if (reset)
    memset (regs_ever_live, 0, sizeof (regs_ever_live));

  /* Turn on any bit whose hard reg is live somewhere per df.  Bits
     are only ever set here, never cleared.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if ((!regs_ever_live[i]) && df_hard_reg_used_p (i))
      {
        regs_ever_live[i] = true;
        changed = true;
      }
  if (changed)
    df_update_entry_exit_and_calls ();
  df->redo_entry_and_exit = false;
}
4188
 
4189
 
4190
/*----------------------------------------------------------------------------
4191
  Dataflow ref information verification functions.
4192
 
4193
  df_reg_chain_mark (refs, regno, is_def, is_eq_use)
4194
  df_reg_chain_verify_unmarked (refs)
4195
  df_refs_verify (VEC(stack,df_ref)*, ref*, bool)
4196
  df_mws_verify (mw*, mw*, bool)
4197
  df_insn_refs_verify (collection_rec, bb, insn, bool)
4198
  df_bb_refs_verify (bb, refs, bool)
4199
  df_bb_verify (bb)
4200
  df_exit_block_bitmap_verify (bool)
4201
  df_entry_block_bitmap_verify (bool)
4202
  df_scan_verify ()
4203
----------------------------------------------------------------------------*/
4204
 
4205
 
4206
/* Mark all refs in the reg chain REFS.  Verify that all of the
   registers are in the correct chain for REGNO / IS_DEF / IS_EQ_USE,
   and return the number of refs seen.  */

static unsigned int
df_reg_chain_mark (df_ref refs, unsigned int regno,
                   bool is_def, bool is_eq_use)
{
  unsigned int count = 0;
  df_ref ref;
  for (ref = refs; ref; ref = DF_REF_NEXT_REG (ref))
    {
      /* A ref must not already be marked; marks are cleared again by
         the per-insn/per-bb verification.  */
      gcc_assert (!DF_REF_IS_REG_MARKED (ref));

      /* If there are no def-use or use-def chains, make sure that all
         of the chains are clear.  */
      if (!df_chain)
        gcc_assert (!DF_REF_CHAIN (ref));

      /* Check to make sure the ref is in the correct chain.  */
      gcc_assert (DF_REF_REGNO (ref) == regno);
      if (is_def)
        gcc_assert (DF_REF_REG_DEF_P (ref));
      else
        gcc_assert (!DF_REF_REG_DEF_P (ref));

      if (is_eq_use)
        gcc_assert ((DF_REF_FLAGS (ref) & DF_REF_IN_NOTE));
      else
        gcc_assert ((DF_REF_FLAGS (ref) & DF_REF_IN_NOTE) == 0);

      /* The doubly-linked reg chain must be consistent.  */
      if (DF_REF_NEXT_REG (ref))
        gcc_assert (DF_REF_PREV_REG (DF_REF_NEXT_REG (ref)) == ref);
      count++;
      DF_REF_REG_MARK (ref);
    }
  return count;
}
4243
 
4244
 
4245
/* Verify that all of the registers in the chain are unmarked.  */
4246
 
4247
static void
4248
df_reg_chain_verify_unmarked (df_ref refs)
4249
{
4250
  df_ref ref;
4251
  for (ref = refs; ref; ref = DF_REF_NEXT_REG (ref))
4252
    gcc_assert (!DF_REF_IS_REG_MARKED (ref));
4253
}
4254
 
4255
 
4256
/* Verify that NEW_REC and OLD_REC have exactly the same members.
   If ABORT_IF_FAIL, assert on any mismatch and unmark the matched
   refs; otherwise return false on the first mismatch.  */

static bool
df_refs_verify (VEC(df_ref,stack) *new_rec, df_ref *old_rec,
                bool abort_if_fail)
{
  unsigned int ix;
  df_ref new_ref;

  FOR_EACH_VEC_ELT (df_ref, new_rec, ix, new_ref)
    {
      if (*old_rec == NULL || !df_ref_equal_p (new_ref, *old_rec))
        {
          if (abort_if_fail)
            gcc_assert (0);
          else
            return false;
        }

      /* Abort if fail is called from the function level verifier.  If
         that is the context, mark this reg as being seen.  */
      if (abort_if_fail)
        {
          gcc_assert (DF_REF_IS_REG_MARKED (*old_rec));
          DF_REF_REG_UNMARK (*old_rec);
        }

      old_rec++;
    }

  /* Both sequences must also end together.  */
  if (abort_if_fail)
    gcc_assert (*old_rec == NULL);
  else
    return *old_rec == NULL;
  /* Not reached: the abort_if_fail path asserts above.  */
  return false;
}
4292
 
4293
 
4294
/* Verify that NEW_REC and OLD_REC have exactly the same members.
   Multiword-hardreg analogue of df_refs_verify; asserts on mismatch
   when ABORT_IF_FAIL, otherwise returns false.  */

static bool
df_mws_verify (VEC(df_mw_hardreg_ptr,stack) *new_rec,
               struct df_mw_hardreg **old_rec,
               bool abort_if_fail)
{
  unsigned int ix;
  struct df_mw_hardreg *new_reg;

  FOR_EACH_VEC_ELT (df_mw_hardreg_ptr, new_rec, ix, new_reg)
    {
      if (*old_rec == NULL || !df_mw_equal_p (new_reg, *old_rec))
        {
          if (abort_if_fail)
            gcc_assert (0);
          else
            return false;
        }
      old_rec++;
    }

  /* Both sequences must also end together.  */
  if (abort_if_fail)
    gcc_assert (*old_rec == NULL);
  else
    return *old_rec == NULL;
  /* Not reached: the abort_if_fail path asserts above.  */
  return false;
}
4322
 
4323
 
4324
/* Return true if the existing insn refs information is complete and
   correct. Otherwise (i.e. if there's any missing or extra refs),
   return the correct df_ref chain in REFS_RETURN.

   If ABORT_IF_FAIL, leave the refs that are verified (already in the
   ref chain) as DF_REF_MARKED(). If it's false, then it's a per-insn
   verification mode instead of the whole function, so unmark
   everything.

   If ABORT_IF_FAIL is set, this function never returns false.  */

static bool
df_insn_refs_verify (struct df_collection_rec *collection_rec,
                     basic_block bb,
                     rtx insn,
                     bool abort_if_fail)
{
  bool ret1, ret2, ret3, ret4;
  unsigned int uid = INSN_UID (insn);
  struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);

  /* Recompute the refs for INSN from scratch into COLLECTION_REC.  */
  df_insn_refs_collect (collection_rec, bb, insn_info);

  if (!DF_INSN_UID_DEFS (uid))
    {
      /* The insn_rec was created but it was never filled out.  */
      if (abort_if_fail)
        gcc_assert (0);
      else
        return false;
    }

  /* Unfortunately we cannot opt out early if one of these is not
     right because the marks will not get cleared.  */
  ret1 = df_refs_verify (collection_rec->def_vec, DF_INSN_UID_DEFS (uid),
                         abort_if_fail);
  ret2 = df_refs_verify (collection_rec->use_vec, DF_INSN_UID_USES (uid),
                         abort_if_fail);
  ret3 = df_refs_verify (collection_rec->eq_use_vec, DF_INSN_UID_EQ_USES (uid),
                         abort_if_fail);
  ret4 = df_mws_verify (collection_rec->mw_vec, DF_INSN_UID_MWS (uid),
                       abort_if_fail);
  return (ret1 && ret2 && ret3 && ret4);
}
4368
 
4369
 
4370
/* Return true if all refs in the basic block are correct and complete.
   Due to df_ref_chain_verify, it will cause all refs
   that are verified to have DF_REF_MARK bit set.  Aborts (via
   gcc_assert inside the called verifiers) on any mismatch.  */

static bool
df_bb_verify (basic_block bb)
{
  rtx insn;
  struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb->index);
  struct df_collection_rec collection_rec;

  memset (&collection_rec, 0, sizeof (struct df_collection_rec));
  collection_rec.def_vec = VEC_alloc (df_ref, stack, 128);
  collection_rec.use_vec = VEC_alloc (df_ref, stack, 32);
  collection_rec.eq_use_vec = VEC_alloc (df_ref, stack, 32);
  collection_rec.mw_vec = VEC_alloc (df_mw_hardreg_ptr, stack, 32);

  gcc_assert (bb_info);

  /* Scan the block, one insn at a time, from beginning to end.  */
  FOR_BB_INSNS_REVERSE (bb, insn)
    {
      if (!INSN_P (insn))
        continue;
      df_insn_refs_verify (&collection_rec, bb, insn, true);
      /* The vectors are reused for the next insn.  */
      df_free_collection_rec (&collection_rec);
    }

  /* Do the artificial defs and uses.  */
  df_bb_refs_collect (&collection_rec, bb);
  df_refs_verify (collection_rec.def_vec, df_get_artificial_defs (bb->index), true);
  df_refs_verify (collection_rec.use_vec, df_get_artificial_uses (bb->index), true);
  df_free_collection_rec (&collection_rec);

  return true;
}
4406
 
4407
 
4408
/* Returns true if the entry block has correct and complete df_ref set.
   If not it either aborts if ABORT_IF_FAIL is true or returns false.
   On an abort both the recomputed and the cached sets are dumped to
   stderr first.  */

static bool
df_entry_block_bitmap_verify (bool abort_if_fail)
{
  bitmap_head entry_block_defs;
  bool is_eq;

  /* Recompute the entry def set and compare against the cached one.  */
  bitmap_initialize (&entry_block_defs, &df_bitmap_obstack);
  df_get_entry_block_def_set (&entry_block_defs);

  is_eq = bitmap_equal_p (&entry_block_defs, df->entry_block_defs);

  if (!is_eq && abort_if_fail)
    {
      print_current_pass (stderr);
      fprintf (stderr, "entry_block_defs = ");
      df_print_regset (stderr, &entry_block_defs);
      fprintf (stderr, "df->entry_block_defs = ");
      df_print_regset (stderr, df->entry_block_defs);
      gcc_assert (0);
    }

  bitmap_clear (&entry_block_defs);

  return is_eq;
}
4436
 
4437
 
4438
/* Returns true if the exit block has correct and complete df_ref set.
   If not it either aborts if ABORT_IF_FAIL is true or returns false.
   On an abort both the recomputed and the cached sets are dumped to
   stderr first.  */

static bool
df_exit_block_bitmap_verify (bool abort_if_fail)
{
  bitmap_head exit_block_uses;
  bool is_eq;

  /* Recompute the exit use set and compare against the cached one.  */
  bitmap_initialize (&exit_block_uses, &df_bitmap_obstack);
  df_get_exit_block_use_set (&exit_block_uses);

  is_eq = bitmap_equal_p (&exit_block_uses, df->exit_block_uses);

  if (!is_eq && abort_if_fail)
    {
      print_current_pass (stderr);
      fprintf (stderr, "exit_block_uses = ");
      df_print_regset (stderr, &exit_block_uses);
      fprintf (stderr, "df->exit_block_uses = ");
      df_print_regset (stderr, df->exit_block_uses);
      gcc_assert (0);
    }

  bitmap_clear (&exit_block_uses);

  return is_eq;
}
4466
 
4467
 
4468
/* Return true if df_ref information for all insns in all blocks are
4469
   correct and complete.  */
4470
 
4471
void
4472
df_scan_verify (void)
4473
{
4474
  unsigned int i;
4475
  basic_block bb;
4476
  bitmap_head regular_block_artificial_uses;
4477
  bitmap_head eh_block_artificial_uses;
4478
 
4479
  if (!df)
4480
    return;
4481
 
4482
  /* Verification is a 4 step process. */
4483
 
4484
  /* (1) All of the refs are marked by going thru the reg chains.  */
4485
  for (i = 0; i < DF_REG_SIZE (df); i++)
4486
    {
4487
      gcc_assert (df_reg_chain_mark (DF_REG_DEF_CHAIN (i), i, true, false)
4488
                  == DF_REG_DEF_COUNT(i));
4489
      gcc_assert (df_reg_chain_mark (DF_REG_USE_CHAIN (i), i, false, false)
4490
                  == DF_REG_USE_COUNT(i));
4491
      gcc_assert (df_reg_chain_mark (DF_REG_EQ_USE_CHAIN (i), i, false, true)
4492
                  == DF_REG_EQ_USE_COUNT(i));
4493
    }
4494
 
4495
  /* (2) There are various bitmaps whose value may change over the
4496
     course of the compilation.  This step recomputes them to make
4497
     sure that they have not slipped out of date.  */
4498
  bitmap_initialize (&regular_block_artificial_uses, &df_bitmap_obstack);
4499
  bitmap_initialize (&eh_block_artificial_uses, &df_bitmap_obstack);
4500
 
4501
  df_get_regular_block_artificial_uses (&regular_block_artificial_uses);
4502
  df_get_eh_block_artificial_uses (&eh_block_artificial_uses);
4503
 
4504
  bitmap_ior_into (&eh_block_artificial_uses,
4505
                   &regular_block_artificial_uses);
4506
 
4507
  /* Check artificial_uses bitmaps didn't change. */
4508
  gcc_assert (bitmap_equal_p (&regular_block_artificial_uses,
4509
                              &df->regular_block_artificial_uses));
4510
  gcc_assert (bitmap_equal_p (&eh_block_artificial_uses,
4511
                              &df->eh_block_artificial_uses));
4512
 
4513
  bitmap_clear (&regular_block_artificial_uses);
4514
  bitmap_clear (&eh_block_artificial_uses);
4515
 
4516
  /* Verify entry block and exit block. These only verify the bitmaps,
4517
     the refs are verified in df_bb_verify.  */
4518
  df_entry_block_bitmap_verify (true);
4519
  df_exit_block_bitmap_verify (true);
4520
 
4521
  /* (3) All of the insns in all of the blocks are traversed and the
4522
     marks are cleared both in the artificial refs attached to the
4523
     blocks and the real refs inside the insns.  It is a failure to
4524
     clear a mark that has not been set as this means that the ref in
4525
     the block or insn was not in the reg chain.  */
4526
 
4527
  FOR_ALL_BB (bb)
4528
    df_bb_verify (bb);
4529
 
4530
  /* (4) See if all reg chains are traversed a second time.  This time
4531
     a check is made that the marks are clear. A set mark would be a
4532
     from a reg that is not in any insn or basic block.  */
4533
 
4534
  for (i = 0; i < DF_REG_SIZE (df); i++)
4535
    {
4536
      df_reg_chain_verify_unmarked (DF_REG_DEF_CHAIN (i));
4537
      df_reg_chain_verify_unmarked (DF_REG_USE_CHAIN (i));
4538
      df_reg_chain_verify_unmarked (DF_REG_EQ_USE_CHAIN (i));
4539
    }
4540
}

powered by: WebSVN 2.1.0

© copyright 1999-2024 OpenCores.org, equivalent to Oliscience, all rights reserved. OpenCores®, registered trademark.