openrisc/trunk/gnu-dev/or1k-gcc/gcc/tree-ssa-live.c - Blame information for rev 778
Repository: https://opencores.org/ocsvn/openrisc/openrisc/trunk

Line No. Rev Author Line
1 684 jeremybenn
/* Liveness for SSA trees.
2
   Copyright (C) 2003, 2004, 2005, 2007, 2008, 2009, 2010, 2011
3
   Free Software Foundation, Inc.
4
   Contributed by Andrew MacLeod <amacleod@redhat.com>
5
 
6
This file is part of GCC.
7
 
8
GCC is free software; you can redistribute it and/or modify
9
it under the terms of the GNU General Public License as published by
10
the Free Software Foundation; either version 3, or (at your option)
11
any later version.
12
 
13
GCC is distributed in the hope that it will be useful,
14
but WITHOUT ANY WARRANTY; without even the implied warranty of
15
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
16
GNU General Public License for more details.
17
 
18
You should have received a copy of the GNU General Public License
19
along with GCC; see the file COPYING3.  If not see
20
<http://www.gnu.org/licenses/>.  */
21
 
22
#include "config.h"
23
#include "system.h"
24
#include "coretypes.h"
25
#include "tm.h"
26
#include "tree.h"
27
#include "tree-pretty-print.h"
28
#include "gimple-pretty-print.h"
29
#include "bitmap.h"
30
#include "tree-flow.h"
31
#include "tree-dump.h"
32
#include "tree-ssa-live.h"
33
#include "diagnostic-core.h"
34
#include "debug.h"
35
#include "flags.h"
36
#include "gimple.h"
37
 
38
#ifdef ENABLE_CHECKING
39
static void  verify_live_on_entry (tree_live_info_p);
40
#endif
41
 
42
 
43
/* VARMAP maintains a mapping from SSA version number to real variables.
44
 
45
   All SSA_NAMES are divided into partitions.  Initially each ssa_name is the
46
   only member of its own partition.  Coalescing will attempt to group any
47
   ssa_names which occur in a copy or in a PHI node into the same partition.
48
 
49
   At the end of out-of-ssa, each partition becomes a "real" variable and is
50
   rewritten as a compiler variable.
51
 
52
   The var_map data structure is used to manage these partitions.  It allows
53
   partitions to be combined, and determines which partition belongs to what
54
   ssa_name or variable, and vice versa.  */
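
/* Editor's sketch (not part of the original file): a typical out-of-SSA
   style consumer of this interface, assuming the coalescing itself is done
   elsewhere (see tree-ssa-coalesce.c), might look roughly like

       var_map map = init_var_map (num_ssa_names);
       ... group copy-related ssa_names with var_union (map, a, b) ...
       partition_view_normal (map, true);
       tree_live_info_p live = calculate_live_ranges (map);
       ... consult live_on_entry () and the liveout bitmaps ...
       delete_tree_live_info (live);
       delete_var_map (map);

   The exact registration and coalescing steps are assumptions and are not
   shown in this file.  */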
55
 
56
 
57
/* This routine will initialize the basevar fields of MAP.  */
58
 
59
static void
60
var_map_base_init (var_map map)
61
{
62
  int x, num_part, num;
63
  tree var;
64
  var_ann_t ann;
65
 
66
  num = 0;
67
  num_part = num_var_partitions (map);
68
 
69
  /* If a base table already exists, clear it, otherwise create it.  */
70
  if (map->partition_to_base_index != NULL)
71
    {
72
      free (map->partition_to_base_index);
73
      VEC_truncate (tree, map->basevars, 0);
74
    }
75
  else
76
    map->basevars = VEC_alloc (tree, heap, MAX (40, (num_part / 10)));
77
 
78
  map->partition_to_base_index = (int *) xmalloc (sizeof (int) * num_part);
79
 
80
  /* Build the base variable list, and point partitions at their bases.  */
81
  for (x = 0; x < num_part; x++)
82
    {
83
      var = partition_to_var (map, x);
84
      if (TREE_CODE (var) == SSA_NAME)
85
         var = SSA_NAME_VAR (var);
86
      ann = var_ann (var);
87
      /* If base variable hasn't been seen, set it up.  */
88
      if (!ann->base_var_processed)
89
        {
90
          ann->base_var_processed = 1;
91
          VAR_ANN_BASE_INDEX (ann) = num++;
92
          VEC_safe_push (tree, heap, map->basevars, var);
93
        }
94
      map->partition_to_base_index[x] = VAR_ANN_BASE_INDEX (ann);
95
    }
96
 
97
  map->num_basevars = num;
98
 
99
  /* Now clear the processed bit.  */
100
  for (x = 0; x < num; x++)
101
    {
102
       var = VEC_index (tree, map->basevars, x);
103
       var_ann (var)->base_var_processed = 0;
104
    }
105
 
106
#ifdef ENABLE_CHECKING
107
  for (x = 0; x < num_part; x++)
108
    {
109
      tree var2;
110
      var = SSA_NAME_VAR (partition_to_var (map, x));
111
      var2 = VEC_index (tree, map->basevars, basevar_index (map, x));
112
      gcc_assert (var == var2);
113
    }
114
#endif
115
}
116
 
117
 
118
/* Remove the base table in MAP.  */
119
 
120
static void
121
var_map_base_fini (var_map map)
122
{
123
  /* Free the basevar info if it is present.  */
124
  if (map->partition_to_base_index != NULL)
125
    {
126
      VEC_free (tree, heap, map->basevars);
127
      free (map->partition_to_base_index);
128
      map->partition_to_base_index = NULL;
129
      map->num_basevars = 0;
130
    }
131
}
132
/* Create a variable partition map of SIZE, initialize and return it.  */
133
 
134
var_map
135
init_var_map (int size)
136
{
137
  var_map map;
138
 
139
  map = (var_map) xmalloc (sizeof (struct _var_map));
140
  map->var_partition = partition_new (size);
141
 
142
  map->partition_to_view = NULL;
143
  map->view_to_partition = NULL;
144
  map->num_partitions = size;
145
  map->partition_size = size;
146
  map->num_basevars = 0;
147
  map->partition_to_base_index = NULL;
148
  map->basevars = NULL;
149
  return map;
150
}
151
 
152
 
153
/* Free memory associated with MAP.  */
154
 
155
void
156
delete_var_map (var_map map)
157
{
158
  var_map_base_fini (map);
159
  partition_delete (map->var_partition);
160
  free (map->partition_to_view);
161
  free (map->view_to_partition);
162
  free (map);
163
}
164
 
165
 
166
/* This function will combine the partitions in MAP for VAR1 and VAR2.  It
167
   returns the partition which represents the new partition.  If the two
168
   partitions cannot be combined, NO_PARTITION is returned.  */
169
 
170
int
171
var_union (var_map map, tree var1, tree var2)
172
{
173
  int p1, p2, p3;
174
 
175
  gcc_assert (TREE_CODE (var1) == SSA_NAME);
176
  gcc_assert (TREE_CODE (var2) == SSA_NAME);
177
 
178
  /* This is independent of partition_to_view. If partition_to_view is
179
     on, then whichever one of these partitions is absorbed will never have a
180
     dereference into the partition_to_view array any more.  */
181
 
182
  p1 = partition_find (map->var_partition, SSA_NAME_VERSION (var1));
183
  p2 = partition_find (map->var_partition, SSA_NAME_VERSION (var2));
184
 
185
  gcc_assert (p1 != NO_PARTITION);
186
  gcc_assert (p2 != NO_PARTITION);
187
 
188
  if (p1 == p2)
189
    p3 = p1;
190
  else
191
    p3 = partition_union (map->var_partition, p1, p2);
192
 
193
  if (map->partition_to_view)
194
    p3 = map->partition_to_view[p3];
195
 
196
  return p3;
197
}
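
/* Editor's sketch (not part of the original file): a coalescer would
   typically call var_union above on the two operands of an SSA name copy,
   e.g.

       if (gimple_assign_ssa_name_copy_p (stmt))
         var_union (map, gimple_assign_lhs (stmt), gimple_assign_rhs1 (stmt));

   Whether a real coalescer applies conflict or cost checks first is not
   shown here; see tree-ssa-coalesce.c.  */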
198
 
199
 
200
/* Compress the partition numbers in MAP such that they fall in the range
201
   0..(num_partitions-1) instead of wherever they turned out during
202
   the partitioning exercise.  This removes any references to unused
203
   partitions, thereby allowing bitmaps and other vectors to be much
204
   denser.
205
 
206
   This is implemented such that compaction doesn't affect partitioning.
207
   I.e., once partitions are created and possibly merged, running one
208
   or more different kind of compaction will not affect the partitions
209
   themselves.  Their index might change, but all the same variables will
210
   still be members of the same partition group.  This allows work on reduced
211
   sets, and no loss of information when a larger set is later desired.
212
 
213
   In particular, coalescing can work on partitions which have 2 or more
214
   definitions, and then 'recompact' later to include all the single
215
   definitions for assignment to program variables.  */
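
/* Editor's sketch (not part of the original file): the view builders below
   support exactly that pattern of working on a reduced set first, e.g.

       partition_view_bitmap (map, interesting_names, false);
       ... coalesce within the compact view ...
       partition_view_normal (map, true);

   where interesting_names is a hypothetical bitmap of SSA_NAME version
   numbers supplied by the caller; the later call recompacts over every
   used partition.  */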
216
 
217
 
218
/* Set MAP back to the initial state of having no partition view.  Return a
219
   bitmap which has a bit set for each partition number which is in use in the
220
   varmap.  */
221
 
222
static bitmap
223
partition_view_init (var_map map)
224
{
225
  bitmap used;
226
  int tmp;
227
  unsigned int x;
228
 
229
  used = BITMAP_ALLOC (NULL);
230
 
231
  /* Already in a view? Abandon the old one.  */
232
  if (map->partition_to_view)
233
    {
234
      free (map->partition_to_view);
235
      map->partition_to_view = NULL;
236
    }
237
  if (map->view_to_partition)
238
    {
239
      free (map->view_to_partition);
240
      map->view_to_partition = NULL;
241
    }
242
 
243
  /* Find out which partitions are actually referenced.  */
244
  for (x = 0; x < map->partition_size; x++)
245
    {
246
      tmp = partition_find (map->var_partition, x);
247
      if (ssa_name (tmp) != NULL_TREE && is_gimple_reg (ssa_name (tmp))
248
          && (!has_zero_uses (ssa_name (tmp))
249
              || !SSA_NAME_IS_DEFAULT_DEF (ssa_name (tmp))))
250
        bitmap_set_bit (used, tmp);
251
    }
252
 
253
  map->num_partitions = map->partition_size;
254
  return used;
255
}
256
 
257
 
258
/* This routine will finalize the view data for MAP based on the partitions
259
   set in SELECTED.  This is either the same bitmap returned from
260
   partition_view_init, or a trimmed down version if some of those partitions
261
   were not desired in this view.  SELECTED is freed before returning.  */
262
 
263
static void
264
partition_view_fini (var_map map, bitmap selected)
265
{
266
  bitmap_iterator bi;
267
  unsigned count, i, x, limit;
268
 
269
  gcc_assert (selected);
270
 
271
  count = bitmap_count_bits (selected);
272
  limit = map->partition_size;
273
 
274
  /* If it's a one-to-one ratio, we don't need any view compaction.  */
275
  if (count < limit)
276
    {
277
      map->partition_to_view = (int *)xmalloc (limit * sizeof (int));
278
      memset (map->partition_to_view, 0xff, (limit * sizeof (int)));
279
      map->view_to_partition = (int *)xmalloc (count * sizeof (int));
280
 
281
      i = 0;
282
      /* Give each selected partition an index.  */
283
      EXECUTE_IF_SET_IN_BITMAP (selected, 0, x, bi)
284
        {
285
          map->partition_to_view[x] = i;
286
          map->view_to_partition[i] = x;
287
          i++;
288
        }
289
      gcc_assert (i == count);
290
      map->num_partitions = i;
291
    }
292
 
293
  BITMAP_FREE (selected);
294
}
295
 
296
 
297
/* Create a partition view which includes all the used partitions in MAP.  If
298
   WANT_BASES is true, create the base variable map as well.  */
299
 
300
extern void
301
partition_view_normal (var_map map, bool want_bases)
302
{
303
  bitmap used;
304
 
305
  used = partition_view_init (map);
306
  partition_view_fini (map, used);
307
 
308
  if (want_bases)
309
    var_map_base_init (map);
310
  else
311
    var_map_base_fini (map);
312
}
313
 
314
 
315
/* Create a partition view in MAP which includes just partitions which occur in
316
   the bitmap ONLY. If WANT_BASES is true, create the base variable map
317
   as well.  */
318
 
319
extern void
320
partition_view_bitmap (var_map map, bitmap only, bool want_bases)
321
{
322
  bitmap used;
323
  bitmap new_partitions = BITMAP_ALLOC (NULL);
324
  unsigned x, p;
325
  bitmap_iterator bi;
326
 
327
  used = partition_view_init (map);
328
  EXECUTE_IF_SET_IN_BITMAP (only, 0, x, bi)
329
    {
330
      p = partition_find (map->var_partition, x);
331
      gcc_assert (bitmap_bit_p (used, p));
332
      bitmap_set_bit (new_partitions, p);
333
    }
334
  partition_view_fini (map, new_partitions);
335
 
336
  BITMAP_FREE (used);
337
  if (want_bases)
338
    var_map_base_init (map);
339
  else
340
    var_map_base_fini (map);
341
}
342
 
343
 
344
static inline void mark_all_vars_used (tree *, void *data);
345
 
346
/* Helper function for mark_all_vars_used, called via walk_tree.  */
347
 
348
static tree
349
mark_all_vars_used_1 (tree *tp, int *walk_subtrees, void *data)
350
{
351
  tree t = *tp;
352
  enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
353
  tree b;
354
 
355
  if (TREE_CODE (t) == SSA_NAME)
356
    t = SSA_NAME_VAR (t);
357
 
358
  if (IS_EXPR_CODE_CLASS (c)
359
      && (b = TREE_BLOCK (t)) != NULL)
360
    TREE_USED (b) = true;
361
 
362
  /* Ignore TMR_OFFSET and TMR_STEP for TARGET_MEM_REFS, as those
363
     fields do not contain vars.  */
364
  if (TREE_CODE (t) == TARGET_MEM_REF)
365
    {
366
      mark_all_vars_used (&TMR_BASE (t), data);
367
      mark_all_vars_used (&TMR_INDEX (t), data);
368
      mark_all_vars_used (&TMR_INDEX2 (t), data);
369
      *walk_subtrees = 0;
370
      return NULL;
371
    }
372
 
373
  /* Only need to mark VAR_DECLS; parameters and return results are not
374
     eliminated as unused.  */
375
  if (TREE_CODE (t) == VAR_DECL)
376
    {
377
      if (data != NULL && bitmap_clear_bit ((bitmap) data, DECL_UID (t))
378
          && DECL_CONTEXT (t) == current_function_decl)
379
        mark_all_vars_used (&DECL_INITIAL (t), data);
380
      set_is_used (t);
381
    }
382
  /* remove_unused_scope_block_p requires information about labels
383
     which are not DECL_IGNORED_P to tell if they might be used in the IL.  */
384
  if (TREE_CODE (t) == LABEL_DECL)
385
    /* Although the TREE_USED values that the frontend uses would be
386
       acceptable (albeit slightly over-conservative) for our purposes,
387
       init_vars_expansion clears TREE_USED for LABEL_DECLs too, so we
388
       must re-compute it here.  */
389
    TREE_USED (t) = 1;
390
 
391
  if (IS_TYPE_OR_DECL_P (t))
392
    *walk_subtrees = 0;
393
 
394
  return NULL;
395
}
396
 
397
/* Mark the scope block SCOPE and its subblocks unused when they can be
398
   possibly eliminated if dead.  */
399
 
400
static void
401
mark_scope_block_unused (tree scope)
402
{
403
  tree t;
404
  TREE_USED (scope) = false;
405
  if (!(*debug_hooks->ignore_block) (scope))
406
    TREE_USED (scope) = true;
407
  for (t = BLOCK_SUBBLOCKS (scope); t ; t = BLOCK_CHAIN (t))
408
    mark_scope_block_unused (t);
409
}
410
 
411
/* Check whether the block is dead (by possibly eliminating its dead subblocks)
412
   and return true if so.
413
   Block is declared dead if:
414
     1) No statements are associated with it.
415
     2) It declares no live variables.
416
     3) All subblocks are dead
417
        or there is precisely one subblock and the block
418
        has the same abstract origin as the outer block and declares
419
        no variables, so it is a pure wrapper.
420
   When we are not outputting full debug info, we also eliminate dead variables
421
   out of scope blocks to let them be recycled by GGC and to save copying work
422
   done by the inliner.  */
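
/* Editor's note (not part of the original file): as a rough illustration,
   for

       void f (void) { { int tmp_unused; { } } }

   compiled without debug info, the inner BLOCKs end up declaring no used
   variables and containing no statements, so the function below can prune
   them, leaving f's DECL_INITIAL without subblocks.  tmp_unused is a
   hypothetical name used only in this example.  */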
423
 
424
static bool
425
remove_unused_scope_block_p (tree scope)
426
{
427
  tree *t, *next;
428
  bool unused = !TREE_USED (scope);
429
  int nsubblocks = 0;
430
 
431
  for (t = &BLOCK_VARS (scope); *t; t = next)
432
    {
433
      next = &DECL_CHAIN (*t);
434
 
435
      /* Debug info of nested function refers to the block of the
436
         function.  We might still call it even if all statements
437
         of the function it was nested into were eliminated.
438
 
439
         TODO: We can actually look into cgraph to see if function
440
         will be output to file.  */
441
      if (TREE_CODE (*t) == FUNCTION_DECL)
442
        unused = false;
443
 
444
      /* If a decl has a value expr, we need to instantiate it
445
         regardless of debug info generation, to avoid codegen
446
         differences in memory overlap tests.  update_equiv_regs() may
447
         indirectly call validate_equiv_mem() to test whether a
448
         SET_DEST overlaps with others, and if the value expr changes
449
         by virtual register instantiation, we may end up with
450
         different results.  */
451
      else if (TREE_CODE (*t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*t))
452
        unused = false;
453
 
454
      /* Remove everything we don't generate debug info for.
455
         Don't remove larger vars though, because BLOCK_VARS are
456
         used also during expansion to determine which variables
457
         might share stack space.  */
458
      else if (DECL_IGNORED_P (*t) && is_gimple_reg (*t))
459
        {
460
          *t = DECL_CHAIN (*t);
461
          next = t;
462
        }
463
 
464
      /* When we are outputting debug info, we usually want to output
465
         info about optimized-out variables in the scope blocks.
466
         The exception is scope blocks not containing any instructions
467
         at all, so the user can't get into the scopes in the first place.  */
468
      else if (var_ann (*t) != NULL && is_used_p (*t))
469
        unused = false;
470
      else if (TREE_CODE (*t) == LABEL_DECL && TREE_USED (*t))
471
        /* For labels that are still used in the IL, the decision to
472
           preserve them must not depend on DEBUG_INFO_LEVEL, otherwise we
473
           risk having different ordering in debug vs.  non-debug builds
474
           during inlining or versioning.
475
           A label appearing here (we have already checked DECL_IGNORED_P)
476
           should not be used in the IL unless it has been explicitly used
477
           before, so we use TREE_USED as an approximation.  */
478
        /* In principle, we should do the same here as for the debug case
479
           below, however, when debugging, there might be additional nested
480
           levels that keep an upper level with a label live, so we have to
481
           force this block to be considered used, too.  */
482
        unused = false;
483
 
484
      /* When we are not doing full debug info, however, we can keep around
485
         only the used variables for cfgexpand's memory packing, saving quite
486
         a lot of memory.
487
 
488
         For the sake of -g3, we keep around those vars but we don't count this as
489
         a use of the block, so an innermost block with no used vars and no instructions
490
         can be considered dead.  We only want to keep around blocks the user can
491
         set breakpoints in and ask about the value of optimized-out variables.
492
 
493
         Similarly we need to keep around types at least until all
494
         variables of all nested blocks are gone.  We track no
495
         information on whether given type is used or not, so we have
496
         to keep them even when not emitting debug information,
497
         otherwise we may end up remapping variables and their (local)
498
         types in different orders depending on whether debug
499
         information is being generated.  */
500
 
501
      else if (TREE_CODE (*t) == TYPE_DECL
502
               || debug_info_level == DINFO_LEVEL_NORMAL
503
               || debug_info_level == DINFO_LEVEL_VERBOSE)
504
        ;
505
      else
506
        {
507
          *t = DECL_CHAIN (*t);
508
          next = t;
509
        }
510
    }
511
 
512
  for (t = &BLOCK_SUBBLOCKS (scope); *t ;)
513
    if (remove_unused_scope_block_p (*t))
514
      {
515
        if (BLOCK_SUBBLOCKS (*t))
516
          {
517
            tree next = BLOCK_CHAIN (*t);
518
            tree supercontext = BLOCK_SUPERCONTEXT (*t);
519
 
520
            *t = BLOCK_SUBBLOCKS (*t);
521
            while (BLOCK_CHAIN (*t))
522
              {
523
                BLOCK_SUPERCONTEXT (*t) = supercontext;
524
                t = &BLOCK_CHAIN (*t);
525
              }
526
            BLOCK_CHAIN (*t) = next;
527
            BLOCK_SUPERCONTEXT (*t) = supercontext;
528
            t = &BLOCK_CHAIN (*t);
529
            nsubblocks ++;
530
          }
531
        else
532
          *t = BLOCK_CHAIN (*t);
533
      }
534
    else
535
      {
536
        t = &BLOCK_CHAIN (*t);
537
        nsubblocks ++;
538
      }
539
 
540
 
541
   if (!unused)
542
     ;
543
   /* Outer scope is always used.  */
544
   else if (!BLOCK_SUPERCONTEXT (scope)
545
            || TREE_CODE (BLOCK_SUPERCONTEXT (scope)) == FUNCTION_DECL)
546
     unused = false;
547
   /* Innermost blocks with no live variables or statements can always be
548
      eliminated.  */
549
   else if (!nsubblocks)
550
     ;
551
   /* For terse debug info we can eliminate info on unused variables.  */
552
   else if (debug_info_level == DINFO_LEVEL_NONE
553
            || debug_info_level == DINFO_LEVEL_TERSE)
554
     {
555
       /* Even for -g0/-g1 don't prune outer scopes from artificial
556
          functions, otherwise diagnostics using tree_nonartificial_location
557
          will not be emitted properly.  */
558
       if (inlined_function_outer_scope_p (scope))
559
         {
560
           tree ao = scope;
561
 
562
           while (ao
563
                  && TREE_CODE (ao) == BLOCK
564
                  && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
565
             ao = BLOCK_ABSTRACT_ORIGIN (ao);
566
           if (ao
567
               && TREE_CODE (ao) == FUNCTION_DECL
568
               && DECL_DECLARED_INLINE_P (ao)
569
               && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
570
             unused = false;
571
         }
572
     }
573
   else if (BLOCK_VARS (scope) || BLOCK_NUM_NONLOCALIZED_VARS (scope))
574
     unused = false;
575
   /* See if this block is important for the representation of an inlined function.
576
      Inlined functions are always represented by a block with
577
      block_ultimate_origin being set to FUNCTION_DECL and DECL_SOURCE_LOCATION
578
      set...  */
579
   else if (inlined_function_outer_scope_p (scope))
580
     unused = false;
581
   else
582
   /* Verify that only blocks with source location set
583
      are entry points to the inlined functions.  */
584
     gcc_assert (BLOCK_SOURCE_LOCATION (scope) == UNKNOWN_LOCATION);
585
 
586
   TREE_USED (scope) = !unused;
587
   return unused;
588
}
589
 
590
/* Mark all VAR_DECLS under *EXPR_P as used, so that they won't be
591
   eliminated during the tree->rtl conversion process.  */
592
 
593
static inline void
594
mark_all_vars_used (tree *expr_p, void *data)
595
{
596
  walk_tree (expr_p, mark_all_vars_used_1, data, NULL);
597
}
598
 
599
 
600
/* Dump scope blocks starting at SCOPE to FILE.  INDENT is the
601
   indentation level and FLAGS is as in print_generic_expr.  */
602
 
603
static void
604
dump_scope_block (FILE *file, int indent, tree scope, int flags)
605
{
606
  tree var, t;
607
  unsigned int i;
608
 
609
  fprintf (file, "\n%*s{ Scope block #%i%s%s",indent, "" , BLOCK_NUMBER (scope),
610
           TREE_USED (scope) ? "" : " (unused)",
611
           BLOCK_ABSTRACT (scope) ? " (abstract)": "");
612
  if (BLOCK_SOURCE_LOCATION (scope) != UNKNOWN_LOCATION)
613
    {
614
      expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (scope));
615
      fprintf (file, " %s:%i", s.file, s.line);
616
    }
617
  if (BLOCK_ABSTRACT_ORIGIN (scope))
618
    {
619
      tree origin = block_ultimate_origin (scope);
620
      if (origin)
621
        {
622
          fprintf (file, " Originating from :");
623
          if (DECL_P (origin))
624
            print_generic_decl (file, origin, flags);
625
          else
626
            fprintf (file, "#%i", BLOCK_NUMBER (origin));
627
        }
628
    }
629
  fprintf (file, " \n");
630
  for (var = BLOCK_VARS (scope); var; var = DECL_CHAIN (var))
631
    {
632
      bool used = false;
633
 
634
      if (var_ann (var))
635
        used = is_used_p (var);
636
 
637
      fprintf (file, "%*s", indent, "");
638
      print_generic_decl (file, var, flags);
639
      fprintf (file, "%s\n", used ? "" : " (unused)");
640
    }
641
  for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (scope); i++)
642
    {
643
      fprintf (file, "%*s",indent, "");
644
      print_generic_decl (file, BLOCK_NONLOCALIZED_VAR (scope, i),
645
                          flags);
646
      fprintf (file, " (nonlocalized)\n");
647
    }
648
  for (t = BLOCK_SUBBLOCKS (scope); t ; t = BLOCK_CHAIN (t))
649
    dump_scope_block (file, indent + 2, t, flags);
650
  fprintf (file, "\n%*s}\n",indent, "");
651
}
652
 
653
/* Dump the tree of lexical scopes starting at SCOPE to stderr.  FLAGS
654
   is as in print_generic_expr.  */
655
 
656
DEBUG_FUNCTION void
657
debug_scope_block (tree scope, int flags)
658
{
659
  dump_scope_block (stderr, 0, scope, flags);
660
}
661
 
662
 
663
/* Dump the tree of lexical scopes of current_function_decl to FILE.
664
   FLAGS is as in print_generic_expr.  */
665
 
666
void
667
dump_scope_blocks (FILE *file, int flags)
668
{
669
  dump_scope_block (file, 0, DECL_INITIAL (current_function_decl), flags);
670
}
671
 
672
 
673
/* Dump the tree of lexical scopes of current_function_decl to stderr.
674
   FLAGS is as in print_generic_expr.  */
675
 
676
DEBUG_FUNCTION void
677
debug_scope_blocks (int flags)
678
{
679
  dump_scope_blocks (stderr, flags);
680
}
681
 
682
/* Remove local variables that are not referenced in the IL.  */
683
 
684
void
685
remove_unused_locals (void)
686
{
687
  basic_block bb;
688
  tree var, t;
689
  referenced_var_iterator rvi;
690
  bitmap global_unused_vars = NULL;
691
  unsigned srcidx, dstidx, num;
692
  bool have_local_clobbers = false;
693
 
694
  /* Removing declarations from lexical blocks when not optimizing is
695
     not only a waste of time, it actually causes differences in stack
696
     layout.  */
697
  if (!optimize)
698
    return;
699
 
700
  timevar_push (TV_REMOVE_UNUSED);
701
 
702
  mark_scope_block_unused (DECL_INITIAL (current_function_decl));
703
 
704
  /* Assume all locals are unused.  */
705
  FOR_EACH_REFERENCED_VAR (cfun, t, rvi)
706
    clear_is_used (t);
707
 
708
  /* Walk the CFG marking all referenced symbols.  */
709
  FOR_EACH_BB (bb)
710
    {
711
      gimple_stmt_iterator gsi;
712
      size_t i;
713
      edge_iterator ei;
714
      edge e;
715
 
716
      /* Walk the statements.  */
717
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
718
        {
719
          gimple stmt = gsi_stmt (gsi);
720
          tree b = gimple_block (stmt);
721
 
722
          if (is_gimple_debug (stmt))
723
            continue;
724
 
725
          if (gimple_clobber_p (stmt))
726
            {
727
              have_local_clobbers = true;
728
              continue;
729
            }
730
 
731
          if (b)
732
            TREE_USED (b) = true;
733
 
734
          for (i = 0; i < gimple_num_ops (stmt); i++)
735
            mark_all_vars_used (gimple_op_ptr (gsi_stmt (gsi), i), NULL);
736
        }
737
 
738
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
739
        {
740
          use_operand_p arg_p;
741
          ssa_op_iter i;
742
          tree def;
743
          gimple phi = gsi_stmt (gsi);
744
 
745
          /* No point processing globals.  */
746
          if (is_global_var (SSA_NAME_VAR (gimple_phi_result (phi))))
747
            continue;
748
 
749
          def = gimple_phi_result (phi);
750
          mark_all_vars_used (&def, NULL);
751
 
752
          FOR_EACH_PHI_ARG (arg_p, phi, i, SSA_OP_ALL_USES)
753
            {
754
              tree arg = USE_FROM_PTR (arg_p);
755
              mark_all_vars_used (&arg, NULL);
756
            }
757
        }
758
 
759
      FOR_EACH_EDGE (e, ei, bb->succs)
760
        if (e->goto_locus)
761
          TREE_USED (e->goto_block) = true;
762
    }
763
 
764
  /* We take a two-pass approach to the out-of-scope clobbers.  We want
765
     to remove them if they are the only references to a local variable,
766
     but we want to retain them when there is any other reference.  So the first pass
767
     ignores them, and the second pass (if there were any) tries to remove
768
     them.  */
769
  if (have_local_clobbers)
770
    FOR_EACH_BB (bb)
771
      {
772
        gimple_stmt_iterator gsi;
773
 
774
        for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
775
          {
776
            gimple stmt = gsi_stmt (gsi);
777
            tree b = gimple_block (stmt);
778
 
779
            if (gimple_clobber_p (stmt))
780
              {
781
                tree lhs = gimple_assign_lhs (stmt);
782
                lhs = get_base_address (lhs);
783
                if (TREE_CODE (lhs) == SSA_NAME)
784
                  lhs = SSA_NAME_VAR (lhs);
785
                if (DECL_P (lhs) && (!var_ann (lhs) || !is_used_p (lhs)))
786
                  {
787
                    unlink_stmt_vdef (stmt);
788
                    gsi_remove (&gsi, true);
789
                    release_defs (stmt);
790
                    continue;
791
                  }
792
                if (b)
793
                  TREE_USED (b) = true;
794
              }
795
            gsi_next (&gsi);
796
          }
797
      }
798
 
799
  cfun->has_local_explicit_reg_vars = false;
800
 
801
  /* Remove unmarked local vars from local_decls.  */
802
  num = VEC_length (tree, cfun->local_decls);
803
  for (srcidx = 0, dstidx = 0; srcidx < num; srcidx++)
804
    {
805
      var = VEC_index (tree, cfun->local_decls, srcidx);
806
      if (TREE_CODE (var) != FUNCTION_DECL
807
          && (!var_ann (var)
808
              || !is_used_p (var)))
809
        {
810
          if (is_global_var (var))
811
            {
812
              if (global_unused_vars == NULL)
813
                global_unused_vars = BITMAP_ALLOC (NULL);
814
              bitmap_set_bit (global_unused_vars, DECL_UID (var));
815
            }
816
          else
817
            continue;
818
        }
819
      else if (TREE_CODE (var) == VAR_DECL
820
               && DECL_HARD_REGISTER (var)
821
               && !is_global_var (var))
822
        cfun->has_local_explicit_reg_vars = true;
823
 
824
      if (srcidx != dstidx)
825
        VEC_replace (tree, cfun->local_decls, dstidx, var);
826
      dstidx++;
827
    }
828
  if (dstidx != num)
829
    VEC_truncate (tree, cfun->local_decls, dstidx);
830
 
831
  /* Remove unmarked global vars from local_decls.  */
832
  if (global_unused_vars != NULL)
833
    {
834
      tree var;
835
      unsigned ix;
836
      FOR_EACH_LOCAL_DECL (cfun, ix, var)
837
        if (TREE_CODE (var) == VAR_DECL
838
            && is_global_var (var)
839
            && var_ann (var) != NULL
840
            && is_used_p (var)
841
            && DECL_CONTEXT (var) == current_function_decl)
842
          mark_all_vars_used (&DECL_INITIAL (var), global_unused_vars);
843
 
844
      num = VEC_length (tree, cfun->local_decls);
845
      for (srcidx = 0, dstidx = 0; srcidx < num; srcidx++)
846
        {
847
          var = VEC_index (tree, cfun->local_decls, srcidx);
848
          if (TREE_CODE (var) == VAR_DECL
849
              && is_global_var (var)
850
              && bitmap_bit_p (global_unused_vars, DECL_UID (var)))
851
            continue;
852
 
853
          if (srcidx != dstidx)
854
            VEC_replace (tree, cfun->local_decls, dstidx, var);
855
          dstidx++;
856
        }
857
      if (dstidx != num)
858
        VEC_truncate (tree, cfun->local_decls, dstidx);
859
      BITMAP_FREE (global_unused_vars);
860
    }
861
 
862
  /* Remove unused variables from REFERENCED_VARs.  */
863
  FOR_EACH_REFERENCED_VAR (cfun, t, rvi)
864
    if (!is_global_var (t)
865
        && TREE_CODE (t) != PARM_DECL
866
        && TREE_CODE (t) != RESULT_DECL
867
        && !is_used_p (t))
868
      remove_referenced_var (t);
869
  remove_unused_scope_block_p (DECL_INITIAL (current_function_decl));
870
  if (dump_file && (dump_flags & TDF_DETAILS))
871
    {
872
      fprintf (dump_file, "Scope blocks after cleanups:\n");
873
      dump_scope_blocks (dump_file, dump_flags);
874
    }
875
 
876
  timevar_pop (TV_REMOVE_UNUSED);
877
}
878
 
879
 
880
/* Allocate and return a new live range information object based on MAP.  */
881
 
882
static tree_live_info_p
883
new_tree_live_info (var_map map)
884
{
885
  tree_live_info_p live;
886
  unsigned x;
887
 
888
  live = (tree_live_info_p) xmalloc (sizeof (struct tree_live_info_d));
889
  live->map = map;
890
  live->num_blocks = last_basic_block;
891
 
892
  live->livein = (bitmap *)xmalloc (last_basic_block * sizeof (bitmap));
893
  for (x = 0; x < (unsigned)last_basic_block; x++)
894
    live->livein[x] = BITMAP_ALLOC (NULL);
895
 
896
  live->liveout = (bitmap *)xmalloc (last_basic_block * sizeof (bitmap));
897
  for (x = 0; x < (unsigned)last_basic_block; x++)
898
    live->liveout[x] = BITMAP_ALLOC (NULL);
899
 
900
  live->work_stack = XNEWVEC (int, last_basic_block);
901
  live->stack_top = live->work_stack;
902
 
903
  live->global = BITMAP_ALLOC (NULL);
904
  return live;
905
}
906
 
907
 
908
/* Free storage for live range info object LIVE.  */
909
 
910
void
911
delete_tree_live_info (tree_live_info_p live)
912
{
913
  int x;
914
 
915
  BITMAP_FREE (live->global);
916
  free (live->work_stack);
917
 
918
  for (x = live->num_blocks - 1; x >= 0; x--)
919
    BITMAP_FREE (live->liveout[x]);
920
  free (live->liveout);
921
 
922
  for (x = live->num_blocks - 1; x >= 0; x--)
923
    BITMAP_FREE (live->livein[x]);
924
  free (live->livein);
925
 
926
  free (live);
927
}
928
 
929
 
930
/* Visit basic block BB and propagate any required live on entry bits from
931
   LIVE into the predecessors.  VISITED is the bitmap of visited blocks.
932
   TMP is a temporary work bitmap which is passed in to avoid reallocating
933
   it each time.  */
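
/* Editor's note (not part of the original file): the propagation step below
   implements the backward dataflow update

       LIVEIN (pred) |= LIVEIN (bb) & ~DEF (pred)

   where, during this phase, live->liveout[pred->index] temporarily holds
   DEF (pred), the set of partitions defined in pred, as arranged by
   set_var_live_on_entry.  */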
934
 
935
static void
936
loe_visit_block (tree_live_info_p live, basic_block bb, sbitmap visited,
937
                 bitmap tmp)
938
{
939
  edge e;
940
  bool change;
941
  edge_iterator ei;
942
  basic_block pred_bb;
943
  bitmap loe;
944
  gcc_assert (!TEST_BIT (visited, bb->index));
945
 
946
  SET_BIT (visited, bb->index);
947
  loe = live_on_entry (live, bb);
948
 
949
  FOR_EACH_EDGE (e, ei, bb->preds)
950
    {
951
      pred_bb = e->src;
952
      if (pred_bb == ENTRY_BLOCK_PTR)
953
        continue;
954
      /* TMP is variables live-on-entry from BB that aren't defined in the
955
         predecessor block.  This should be the live on entry vars to pred.
956
         Note that liveout is the DEFs in a block while live on entry is
957
         being calculated.  */
958
      bitmap_and_compl (tmp, loe, live->liveout[pred_bb->index]);
959
 
960
      /* Add these bits to live-on-entry for the pred.  If there are any
961
         changes, and pred_bb has been visited already, add it to the
962
         revisit stack.  */
963
      change = bitmap_ior_into (live_on_entry (live, pred_bb), tmp);
964
      if (TEST_BIT (visited, pred_bb->index) && change)
965
        {
966
          RESET_BIT (visited, pred_bb->index);
967
          *(live->stack_top)++ = pred_bb->index;
968
        }
969
    }
970
}
971
 
972
 
973
/* Using LIVE, fill in all the live-on-entry blocks between the defs and uses
974
   of all the variables.  */
975
 
976
static void
977
live_worklist (tree_live_info_p live)
978
{
979
  unsigned b;
980
  basic_block bb;
981
  sbitmap visited = sbitmap_alloc (last_basic_block + 1);
982
  bitmap tmp = BITMAP_ALLOC (NULL);
983
 
984
  sbitmap_zero (visited);
985
 
986
  /* Visit all the blocks in reverse order and propagate live on entry values
987
     into the predecessor blocks.  */
988
  FOR_EACH_BB_REVERSE (bb)
989
    loe_visit_block (live, bb, visited, tmp);
990
 
991
  /* Process any blocks which require further iteration.  */
992
  while (live->stack_top != live->work_stack)
993
    {
994
      b = *--(live->stack_top);
995
      loe_visit_block (live, BASIC_BLOCK (b), visited, tmp);
996
    }
997
 
998
  BITMAP_FREE (tmp);
999
  sbitmap_free (visited);
1000
}
1001
 
1002
 
1003
/* Calculate the initial live on entry vector for SSA_NAME using immediate_use
1004
   links.  Set the live on entry fields in LIVE.  Defs are marked temporarily
1005
   in the liveout vector.  */
1006
 
1007
static void
1008
set_var_live_on_entry (tree ssa_name, tree_live_info_p live)
1009
{
1010
  int p;
1011
  gimple stmt;
1012
  use_operand_p use;
1013
  basic_block def_bb = NULL;
1014
  imm_use_iterator imm_iter;
1015
  bool global = false;
1016
 
1017
  p = var_to_partition (live->map, ssa_name);
1018
  if (p == NO_PARTITION)
1019
    return;
1020
 
1021
  stmt = SSA_NAME_DEF_STMT (ssa_name);
1022
  if (stmt)
1023
    {
1024
      def_bb = gimple_bb (stmt);
1025
      /* Mark defs in liveout bitmap temporarily.  */
1026
      if (def_bb)
1027
        bitmap_set_bit (live->liveout[def_bb->index], p);
1028
    }
1029
  else
1030
    def_bb = ENTRY_BLOCK_PTR;
1031
 
1032
  /* Visit each use of SSA_NAME and if it isn't in the same block as the def,
1033
     add it to the list of live on entry blocks.  */
1034
  FOR_EACH_IMM_USE_FAST (use, imm_iter, ssa_name)
1035
    {
1036
      gimple use_stmt = USE_STMT (use);
1037
      basic_block add_block = NULL;
1038
 
1039
      if (gimple_code (use_stmt) == GIMPLE_PHI)
1040
        {
1041
          /* Uses in PHI's are considered to be live at exit of the SRC block
1042
             as this is where a copy would be inserted.  Check to see if it is
1043
             defined in that block, or whether it's live on entry.  */
1044
          int index = PHI_ARG_INDEX_FROM_USE (use);
1045
          edge e = gimple_phi_arg_edge (use_stmt, index);
1046
          if (e->src != ENTRY_BLOCK_PTR)
1047
            {
1048
              if (e->src != def_bb)
1049
                add_block = e->src;
1050
            }
1051
        }
1052
      else if (is_gimple_debug (use_stmt))
1053
        continue;
1054
      else
1055
        {
1056
          /* If it's not defined in this block, it's live on entry.  */
1057
          basic_block use_bb = gimple_bb (use_stmt);
1058
          if (use_bb != def_bb)
1059
            add_block = use_bb;
1060
        }
1061
 
1062
      /* If there was a live on entry use, set the bit.  */
1063
      if (add_block)
1064
        {
1065
          global = true;
1066
          bitmap_set_bit (live->livein[add_block->index], p);
1067
        }
1068
    }
1069
 
1070
  /* If SSA_NAME is live on entry to at least one block, fill in all the live
1071
     on entry blocks between the def and all the uses.  */
1072
  if (global)
1073
    bitmap_set_bit (live->global, p);
1074
}
1075
 
1076
 
1077
/* Calculate the live on exit vectors based on the entry info in LIVEINFO.  */
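
/* Editor's note (not part of the original file): the routine below computes,
   for every block BB,

       LIVEOUT (BB) = (union of LIVEIN (S) over CFG successors S of BB)
                      | { partitions of PHI arguments on edges leaving BB }

   PHI arguments count as live on exit from the edge's source block because
   that is where out-of-SSA would insert the copies.  */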
1078
 
1079
void
1080
calculate_live_on_exit (tree_live_info_p liveinfo)
1081
{
1082
  basic_block bb;
1083
  edge e;
1084
  edge_iterator ei;
1085
 
1086
  /* Live-on-entry calculations used the liveout vectors for defs; clear them.  */
1087
  FOR_EACH_BB (bb)
1088
    bitmap_clear (liveinfo->liveout[bb->index]);
1089
 
1090
  /* Set all the live-on-exit bits for uses in PHIs.  */
1091
  FOR_EACH_BB (bb)
1092
    {
1093
      gimple_stmt_iterator gsi;
1094
      size_t i;
1095
 
1096
      /* Mark the PHI arguments which are live on exit to the pred block.  */
1097
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1098
        {
1099
          gimple phi = gsi_stmt (gsi);
1100
          for (i = 0; i < gimple_phi_num_args (phi); i++)
1101
            {
1102
              tree t = PHI_ARG_DEF (phi, i);
1103
              int p;
1104
 
1105
              if (TREE_CODE (t) != SSA_NAME)
1106
                continue;
1107
 
1108
              p = var_to_partition (liveinfo->map, t);
1109
              if (p == NO_PARTITION)
1110
                continue;
1111
              e = gimple_phi_arg_edge (phi, i);
1112
              if (e->src != ENTRY_BLOCK_PTR)
1113
                bitmap_set_bit (liveinfo->liveout[e->src->index], p);
1114
            }
1115
        }
1116
 
1117
      /* Add each successor's live on entry to this block's live on exit.  */
1118
      FOR_EACH_EDGE (e, ei, bb->succs)
1119
        if (e->dest != EXIT_BLOCK_PTR)
1120
          bitmap_ior_into (liveinfo->liveout[bb->index],
1121
                           live_on_entry (liveinfo, e->dest));
1122
    }
1123
}
1124
 
1125
 
1126
/* Given partition map MAP, calculate all the live on entry bitmaps for
1127
   each partition.  Return a new live info object.  */
1128
 
1129
tree_live_info_p
1130
calculate_live_ranges (var_map map)
1131
{
1132
  tree var;
1133
  unsigned i;
1134
  tree_live_info_p live;
1135
 
1136
  live = new_tree_live_info (map);
1137
  for (i = 0; i < num_var_partitions (map); i++)
1138
    {
1139
      var = partition_to_var (map, i);
1140
      if (var != NULL_TREE)
1141
        set_var_live_on_entry (var, live);
1142
    }
1143
 
1144
  live_worklist (live);
1145
 
1146
#ifdef ENABLE_CHECKING
1147
  verify_live_on_entry (live);
1148
#endif
1149
 
1150
  calculate_live_on_exit (live);
1151
  return live;
1152
}
1153
 
1154
 
1155
/* Output partition map MAP to file F.  */
1156
 
1157
void
1158
dump_var_map (FILE *f, var_map map)
1159
{
1160
  int t;
1161
  unsigned x, y;
1162
  int p;
1163
 
1164
  fprintf (f, "\nPartition map \n\n");
1165
 
1166
  for (x = 0; x < map->num_partitions; x++)
1167
    {
1168
      if (map->view_to_partition != NULL)
1169
        p = map->view_to_partition[x];
1170
      else
1171
        p = x;
1172
 
1173
      if (ssa_name (p) == NULL_TREE)
1174
        continue;
1175
 
1176
      t = 0;
1177
      for (y = 1; y < num_ssa_names; y++)
1178
        {
1179
          p = partition_find (map->var_partition, y);
1180
          if (map->partition_to_view)
1181
            p = map->partition_to_view[p];
1182
          if (p == (int)x)
1183
            {
1184
              if (t++ == 0)
1185
                {
1186
                  fprintf(f, "Partition %d (", x);
1187
                  print_generic_expr (f, partition_to_var (map, p), TDF_SLIM);
1188
                  fprintf (f, " - ");
1189
                }
1190
              fprintf (f, "%d ", y);
1191
            }
1192
        }
1193
      if (t != 0)
1194
        fprintf (f, ")\n");
1195
    }
1196
  fprintf (f, "\n");
1197
}
1198
 
1199
 
1200
/* Output live range info LIVE to file F, controlled by FLAG.  */
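
/* Editor's sketch (not part of the original file): a debugging session
   might combine the dumpers in this file as, e.g.,

       dump_var_map (stderr, map);
       dump_live_info (stderr, live, LIVEDUMP_ENTRY | LIVEDUMP_EXIT);

   using the LIVEDUMP_* flag bits tested below.  */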
1201
 
1202
void
1203
dump_live_info (FILE *f, tree_live_info_p live, int flag)
1204
{
1205
  basic_block bb;
1206
  unsigned i;
1207
  var_map map = live->map;
1208
  bitmap_iterator bi;
1209
 
1210
  if ((flag & LIVEDUMP_ENTRY) && live->livein)
1211
    {
1212
      FOR_EACH_BB (bb)
1213
        {
1214
          fprintf (f, "\nLive on entry to BB%d : ", bb->index);
1215
          EXECUTE_IF_SET_IN_BITMAP (live->livein[bb->index], 0, i, bi)
1216
            {
1217
              print_generic_expr (f, partition_to_var (map, i), TDF_SLIM);
1218
              fprintf (f, "  ");
1219
            }
1220
          fprintf (f, "\n");
1221
        }
1222
    }
1223
 
1224
  if ((flag & LIVEDUMP_EXIT) && live->liveout)
1225
    {
1226
      FOR_EACH_BB (bb)
1227
        {
1228
          fprintf (f, "\nLive on exit from BB%d : ", bb->index);
1229
          EXECUTE_IF_SET_IN_BITMAP (live->liveout[bb->index], 0, i, bi)
1230
            {
1231
              print_generic_expr (f, partition_to_var (map, i), TDF_SLIM);
1232
              fprintf (f, "  ");
1233
            }
1234
          fprintf (f, "\n");
1235
        }
1236
    }
1237
}
1238
 
1239
struct GTY(()) numbered_tree_d
1240
{
1241
  tree t;
1242
  int num;
1243
};
1244
typedef struct numbered_tree_d numbered_tree;
1245
 
1246
DEF_VEC_O (numbered_tree);
1247
DEF_VEC_ALLOC_O (numbered_tree, heap);
1248
 
1249
/* Compare two declaration references by their DECL_UID / sequence number.
1250
   Called via qsort.  */
1251
 
1252
static int
1253
compare_decls_by_uid (const void *pa, const void *pb)
1254
{
1255
  const numbered_tree *nt_a = ((const numbered_tree *)pa);
1256
  const numbered_tree *nt_b = ((const numbered_tree *)pb);
1257
 
1258
  if (DECL_UID (nt_a->t) != DECL_UID (nt_b->t))
1259
    return  DECL_UID (nt_a->t) - DECL_UID (nt_b->t);
1260
  return nt_a->num - nt_b->num;
1261
}
1262
 
1263
/* Called via walk_gimple_stmt / walk_gimple_op by dump_enumerated_decls.  */
1264
static tree
1265
dump_enumerated_decls_push (tree *tp, int *walk_subtrees, void *data)
1266
{
1267
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
1268
  VEC (numbered_tree, heap) **list = (VEC (numbered_tree, heap) **) &wi->info;
1269
  numbered_tree nt;
1270
 
1271
  if (!DECL_P (*tp))
1272
    return NULL_TREE;
1273
  nt.t = *tp;
1274
  nt.num = VEC_length (numbered_tree, *list);
1275
  VEC_safe_push (numbered_tree, heap, *list, &nt);
1276
  *walk_subtrees = 0;
1277
  return NULL_TREE;
1278
}
1279
 
1280
/* Find all the declarations used by the current function, sort them by uid,
1281
   and emit the sorted list.  Each declaration is tagged with a sequence
1282
   number indicating when it was found during statement / tree walking,
1283
   so that TDF_NOUID comparisons of anonymous declarations are still
1284
   meaningful.  Where a declaration was encountered more than once, we
1285
   emit only the sequence number of the first encounter.
1286
   FILE is the dump file where to output the list and FLAGS is as in
1287
   print_generic_expr.  */
1288
void
1289
dump_enumerated_decls (FILE *file, int flags)
1290
{
1291
  basic_block bb;
1292
  struct walk_stmt_info wi;
1293
  VEC (numbered_tree, heap) *decl_list = VEC_alloc (numbered_tree, heap, 40);
1294
 
1295
  memset (&wi, '\0', sizeof (wi));
1296
  wi.info = (void*) decl_list;
1297
  FOR_EACH_BB (bb)
1298
    {
1299
      gimple_stmt_iterator gsi;
1300
 
1301
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1302
        if (!is_gimple_debug (gsi_stmt (gsi)))
1303
          walk_gimple_stmt (&gsi, NULL, dump_enumerated_decls_push, &wi);
1304
    }
1305
  decl_list = (VEC (numbered_tree, heap) *) wi.info;
1306
  VEC_qsort (numbered_tree, decl_list, compare_decls_by_uid);
1307
  if (VEC_length (numbered_tree, decl_list))
1308
    {
1309
      unsigned ix;
1310
      numbered_tree *ntp;
1311
      tree last = NULL_TREE;
1312
 
1313
      fprintf (file, "Declarations used by %s, sorted by DECL_UID:\n",
1314
               current_function_name ());
1315
      FOR_EACH_VEC_ELT (numbered_tree, decl_list, ix, ntp)
1316
        {
1317
          if (ntp->t == last)
1318
            continue;
1319
          fprintf (file, "%d: ", ntp->num);
1320
          print_generic_decl (file, ntp->t, flags);
1321
          fprintf (file, "\n");
1322
          last = ntp->t;
1323
        }
1324
    }
1325
  VEC_free (numbered_tree, heap, decl_list);
1326
}
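
/* Editor's sketch (not part of the original file): a pass wanting UID-stable
   dumps could call the routine above as, e.g.,

       if (dump_file)
         dump_enumerated_decls (dump_file, dump_flags | TDF_NOUID);

   TDF_NOUID suppresses DECL_UIDs in the printed declarations, so the
   sequence numbers emitted above keep the listing comparable across runs.
   Whether any in-tree pass does exactly this is an assumption.  */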
1327
 
1328
#ifdef ENABLE_CHECKING
1329
/* Verify that SSA_VAR is a non-virtual SSA_NAME.  */
1330
 
1331
void
1332
register_ssa_partition_check (tree ssa_var)
1333
{
1334
  gcc_assert (TREE_CODE (ssa_var) == SSA_NAME);
1335
  if (!is_gimple_reg (SSA_NAME_VAR (ssa_var)))
1336
    {
1337
      fprintf (stderr, "Illegally registering a virtual SSA name :");
1338
      print_generic_expr (stderr, ssa_var, TDF_SLIM);
1339
      fprintf (stderr, " in the SSA->Normal phase.\n");
1340
      internal_error ("SSA corruption");
1341
    }
1342
}
1343
 
1344
 
1345
/* Verify that the info in LIVE matches the current cfg.  */
1346
 
1347
static void
1348
verify_live_on_entry (tree_live_info_p live)
1349
{
1350
  unsigned i;
1351
  tree var;
1352
  gimple stmt;
1353
  basic_block bb;
1354
  edge e;
1355
  int num;
1356
  edge_iterator ei;
1357
  var_map map = live->map;
1358
 
1359
   /* Check for live on entry partitions and report those with a DEF in
1360
      the program. This will typically mean an optimization has done
1361
      something wrong.  */
1362
  bb = ENTRY_BLOCK_PTR;
1363
  num = 0;
1364
  FOR_EACH_EDGE (e, ei, bb->succs)
1365
    {
1366
      int entry_block = e->dest->index;
1367
      if (e->dest == EXIT_BLOCK_PTR)
1368
        continue;
1369
      for (i = 0; i < (unsigned)num_var_partitions (map); i++)
1370
        {
1371
          basic_block tmp;
1372
          tree d;
1373
          bitmap loe;
1374
          var = partition_to_var (map, i);
1375
          stmt = SSA_NAME_DEF_STMT (var);
1376
          tmp = gimple_bb (stmt);
1377
          d = gimple_default_def (cfun, SSA_NAME_VAR (var));
1378
 
1379
          loe = live_on_entry (live, e->dest);
1380
          if (loe && bitmap_bit_p (loe, i))
1381
            {
1382
              if (!gimple_nop_p (stmt))
1383
                {
1384
                  num++;
1385
                  print_generic_expr (stderr, var, TDF_SLIM);
1386
                  fprintf (stderr, " is defined ");
1387
                  if (tmp)
1388
                    fprintf (stderr, " in BB%d, ", tmp->index);
1389
                  fprintf (stderr, "by:\n");
1390
                  print_gimple_stmt (stderr, stmt, 0, TDF_SLIM);
1391
                  fprintf (stderr, "\nIt is also live-on-entry to entry BB %d",
1392
                           entry_block);
1393
                  fprintf (stderr, " So it appears to have multiple defs.\n");
1394
                }
1395
              else
1396
                {
1397
                  if (d != var)
1398
                    {
1399
                      num++;
1400
                      print_generic_expr (stderr, var, TDF_SLIM);
1401
                      fprintf (stderr, " is live-on-entry to BB%d ",
1402
                               entry_block);
1403
                      if (d)
1404
                        {
1405
                          fprintf (stderr, " but is not the default def of ");
1406
                          print_generic_expr (stderr, d, TDF_SLIM);
1407
                          fprintf (stderr, "\n");
1408
                        }
1409
                      else
1410
                        fprintf (stderr, " and there is no default def.\n");
1411
                    }
1412
                }
1413
            }
1414
          else
1415
            if (d == var)
1416
              {
1417
                /* The only way this var shouldn't be marked live on entry is
1418
                   if it occurs in a PHI argument of the block.  */
1419
                size_t z;
1420
                bool ok = false;
1421
                gimple_stmt_iterator gsi;
1422
                for (gsi = gsi_start_phis (e->dest);
1423
                     !gsi_end_p (gsi) && !ok;
1424
                     gsi_next (&gsi))
1425
                  {
1426
                    gimple phi = gsi_stmt (gsi);
1427
                    for (z = 0; z < gimple_phi_num_args (phi); z++)
1428
                      if (var == gimple_phi_arg_def (phi, z))
1429
                        {
1430
                          ok = true;
1431
                          break;
1432
                        }
1433
                  }
1434
                if (ok)
1435
                  continue;
1436
                num++;
1437
                print_generic_expr (stderr, var, TDF_SLIM);
1438
                fprintf (stderr, " is not marked live-on-entry to entry BB%d ",
1439
                         entry_block);
1440
                fprintf (stderr, "but it is a default def so it should be.\n");
1441
              }
1442
        }
1443
    }
1444
  gcc_assert (num <= 0);
1445
}
1446
#endif
