/* Basic IPA optimizations and utilities.
2
   Copyright (C) 2003, 2004, 2005, 2007, 2008, 2009, 2010, 2011
3
   Free Software Foundation, Inc.
4
 
5
This file is part of GCC.
6
 
7
GCC is free software; you can redistribute it and/or modify it under
8
the terms of the GNU General Public License as published by the Free
9
Software Foundation; either version 3, or (at your option) any later
10
version.
11
 
12
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13
WARRANTY; without even the implied warranty of MERCHANTABILITY or
14
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15
for more details.
16
 
17
You should have received a copy of the GNU General Public License
18
along with GCC; see the file COPYING3.  If not see
19
<http://www.gnu.org/licenses/>.  */
20
 
21
#include "config.h"
22
#include "system.h"
23
#include "coretypes.h"
24
#include "tm.h"
25
#include "cgraph.h"
26
#include "tree-pass.h"
27
#include "timevar.h"
28
#include "gimple.h"
29
#include "ggc.h"
30
#include "flags.h"
31
#include "pointer-set.h"
32
#include "target.h"
33
#include "tree-iterator.h"
34
#include "ipa-utils.h"
35
 
36
/* Look for all functions inlined to NODE and update their inlined_to pointers
37
   to INLINED_TO.  */
38
 
39
static void
40
update_inlined_to_pointer (struct cgraph_node *node, struct cgraph_node *inlined_to)
41
{
42
  struct cgraph_edge *e;
43
  for (e = node->callees; e; e = e->next_callee)
44
    if (e->callee->global.inlined_to)
45
      {
46
        e->callee->global.inlined_to = inlined_to;
47
        update_inlined_to_pointer (e->callee, inlined_to);
48
      }
49
}
50
 
51
/* Add cgraph NODE to queue starting at FIRST.
52
 
53
   The queue is linked via AUX pointers and terminated by a pointer to 1.
   We enqueue nodes on two occasions: when we find them reachable or when we
   find their bodies needed for further cloning.  In the second case we mark
   them by a pointer to 2 after processing, so they are re-queued once they
   become reachable.  */
58
 
59
static void
60
enqueue_cgraph_node (struct cgraph_node *node, struct cgraph_node **first)
61
{
62
  /* Node is still in queue; do nothing.  */
63
  if (node->aux && node->aux != (void *) 2)
64
    return;
65
  /* Node was already processed as unreachable, re-enqueue
66
     only if it became reachable now.  */
67
  if (node->aux == (void *)2 && !node->reachable)
68
    return;
69
  node->aux = *first;
70
  *first = node;
71
}
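
/* To summarize the queue protocol used by the reachability code in this
   file: NODE->aux == NULL means the node has never been queued; NODE->aux ==
   (void *) 2 means the node was processed while unreachable and may be
   re-queued once it becomes reachable; any other value means the node is on
   the queue, with AUX pointing to the next queued node and the chain being
   terminated by the sentinel pointer (void *) 1.  */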
72
 
73
/* Add varpool NODE to queue starting at FIRST.  */
74
 
75
static void
76
enqueue_varpool_node (struct varpool_node *node, struct varpool_node **first)
77
{
78
  node->aux = *first;
79
  *first = node;
80
}
81
 
82
/* Process references.  */
83
 
84
static void
85
process_references (struct ipa_ref_list *list,
86
                    struct cgraph_node **first,
87
                    struct varpool_node **first_varpool,
88
                    bool before_inlining_p)
89
{
90
  int i;
91
  struct ipa_ref *ref;
92
  for (i = 0; ipa_ref_list_reference_iterate (list, i, ref); i++)
93
    {
94
      if (ref->refered_type == IPA_REF_CGRAPH)
95
        {
96
          struct cgraph_node *node = ipa_ref_node (ref);
97
          if (!node->reachable
98
              && node->analyzed
99
              && (!DECL_EXTERNAL (node->decl)
100
                  || before_inlining_p))
101
            node->reachable = true;
102
          enqueue_cgraph_node (node, first);
103
        }
104
      else
105
        {
106
          struct varpool_node *node = ipa_ref_varpool_node (ref);
107
          if (!node->needed)
108
            {
109
              varpool_mark_needed_node (node);
110
              enqueue_varpool_node (node, first_varpool);
111
            }
112
        }
113
    }
114
}
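
/* Note that references to functions enqueue the cgraph node unconditionally
   (marking it reachable only when it is analyzed and either not
   DECL_EXTERNAL or we are still before inlining), while references to
   variables enqueue the varpool node only the first time it is marked
   needed.  */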
115
 
116
 
117
/* Return true when NODE cannot be local.  Worker for cgraph_local_node_p.  */
118
 
119
static bool
120
cgraph_non_local_node_p_1 (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
121
{
122
   /* FIXME: Aliases can be local, but i386 gets thunks wrong then.  */
123
   return !(cgraph_only_called_directly_or_aliased_p (node)
124
            && !ipa_ref_has_aliases_p (&node->ref_list)
125
            && node->analyzed
126
            && !DECL_EXTERNAL (node->decl)
127
            && !node->local.externally_visible
128
            && !node->reachable_from_other_partition
129
            && !node->in_other_partition);
130
}
131
 
132
/* Return true when function can be marked local.  */
133
 
134
static bool
135
cgraph_local_node_p (struct cgraph_node *node)
136
{
137
   struct cgraph_node *n = cgraph_function_or_thunk_node (node, NULL);
138
 
139
   /* FIXME: thunks can be considered local, but we need to prevent i386
      from attempting to change their calling convention.  */
141
   if (n->thunk.thunk_p)
142
     return false;
143
   return !cgraph_for_node_and_aliases (n,
144
                                        cgraph_non_local_node_p_1, NULL, true);
145
 
146
}
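
/* Put differently, NODE is local only when neither the function itself nor
   any of its aliases fails the cgraph_non_local_node_p_1 test above, and
   thunks are never treated as local because of the i386 calling-convention
   issue mentioned in the FIXME.  */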
147
 
148
/* Return true when NODE has ADDR reference.  */
149
 
150
static bool
151
has_addr_references_p (struct cgraph_node *node,
152
                       void *data ATTRIBUTE_UNUSED)
153
{
154
  int i;
155
  struct ipa_ref *ref;
156
 
157
  for (i = 0; ipa_ref_list_refering_iterate (&node->ref_list, i, ref); i++)
158
    if (ref->use == IPA_REF_ADDR)
159
      return true;
160
  return false;
161
}
162
 
163
/* Perform reachability analysis and reclaim all unreachable nodes.
164
   If BEFORE_INLINING_P is true this function is called before inlining
   decisions have been made.  If BEFORE_INLINING_P is false this function also
   removes unneeded bodies of extern inline functions.  */
167
 
168
bool
169
cgraph_remove_unreachable_nodes (bool before_inlining_p, FILE *file)
170
{
171
  struct cgraph_node *first = (struct cgraph_node *) (void *) 1;
172
  struct varpool_node *first_varpool = (struct varpool_node *) (void *) 1;
173
  struct cgraph_node *node, *next;
174
  struct varpool_node *vnode, *vnext;
175
  bool changed = false;
176
 
177
#ifdef ENABLE_CHECKING
178
  verify_cgraph ();
179
#endif
180
  if (file)
181
    fprintf (file, "\nReclaiming functions:");
182
#ifdef ENABLE_CHECKING
183
  for (node = cgraph_nodes; node; node = node->next)
184
    gcc_assert (!node->aux);
185
  for (vnode = varpool_nodes; vnode; vnode = vnode->next)
186
    gcc_assert (!vnode->aux);
187
#endif
188
  varpool_reset_queue ();
189
  /* Mark functions whose bodies are obviously needed.
190
     This is mostly when they can be referenced externally.  Inline clones
     are special since their declarations are shared with the master clone and
     thus cgraph_can_remove_if_no_direct_calls_and_refs_p should not be called
     on them.  */
193
  for (node = cgraph_nodes; node; node = node->next)
194
    if (node->analyzed && !node->global.inlined_to
195
        && (!cgraph_can_remove_if_no_direct_calls_and_refs_p (node)
196
            /* Keep around virtual functions for possible devirtualization.  */
197
            || (before_inlining_p
198
                && DECL_VIRTUAL_P (node->decl)
199
                && (DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl)))))
200
      {
201
        gcc_assert (!node->global.inlined_to);
202
        enqueue_cgraph_node (node, &first);
203
        node->reachable = true;
204
      }
205
    else
206
      {
207
        gcc_assert (!node->aux);
208
        node->reachable = false;
209
      }
210
 
211
  /* Mark variables that are obviously needed.  */
212
  for (vnode = varpool_nodes; vnode; vnode = vnode->next)
213
    {
214
      vnode->next_needed = NULL;
215
      vnode->prev_needed = NULL;
216
      if ((vnode->analyzed || vnode->force_output)
217
          && !varpool_can_remove_if_no_refs (vnode))
218
        {
219
          vnode->needed = false;
220
          varpool_mark_needed_node (vnode);
221
          enqueue_varpool_node (vnode, &first_varpool);
222
        }
223
      else
224
        vnode->needed = false;
225
    }
226
 
227
  /* Perform reachability analysis.  As a special case do not consider
     extern inline functions that were not inlined as live because we won't
     output them at all.

     We maintain two worklists, one for cgraph nodes and one for varpool
     nodes, and we are finished once both are empty.  */
233
 
234
  while (first != (struct cgraph_node *) (void *) 1
235
         || first_varpool != (struct varpool_node *) (void *) 1)
236
    {
237
      if (first != (struct cgraph_node *) (void *) 1)
238
        {
239
          struct cgraph_edge *e;
240
          node = first;
241
          first = (struct cgraph_node *) first->aux;
242
          if (!node->reachable)
243
            node->aux = (void *)2;
244
 
245
          /* If we found this node reachable, first mark its callees
             reachable too, unless they are direct calls to extern inline
             functions we decided not to inline.  */
248
          if (node->reachable)
249
            {
250
              for (e = node->callees; e; e = e->next_callee)
251
                {
252
                  if (!e->callee->reachable
253
                      && node->analyzed
254
                      && (!e->inline_failed
255
                          || !DECL_EXTERNAL (e->callee->decl)
256
                          || before_inlining_p))
257
                    e->callee->reachable = true;
258
                  enqueue_cgraph_node (e->callee, &first);
259
                }
260
              process_references (&node->ref_list, &first, &first_varpool, before_inlining_p);
261
            }
262
 
263
          /* If any function in a comdat group is reachable, force
264
             all other functions in the same comdat group to be
265
             also reachable.  */
266
          if (node->same_comdat_group
267
              && node->reachable
268
              && !node->global.inlined_to)
269
            {
270
              for (next = node->same_comdat_group;
271
                   next != node;
272
                   next = next->same_comdat_group)
273
                if (!next->reachable)
274
                  {
275
                    next->reachable = true;
276
                    enqueue_cgraph_node (next, &first);
277
                  }
278
            }
279
 
280
          /* We can freely remove inline clones even if they are cloned, however
             if a function is a clone of a real clone, we must keep it around in
             order to make materialize_clones produce a function body with the
             changes applied.  */
284
          while (node->clone_of && !node->clone_of->aux
285
                 && !gimple_has_body_p (node->decl))
286
            {
287
              bool noninline = node->clone_of->decl != node->decl;
288
              node = node->clone_of;
289
              if (noninline && !node->reachable && !node->aux)
290
                {
291
                  enqueue_cgraph_node (node, &first);
292
                  break;
293
                }
294
            }
295
        }
296
      if (first_varpool != (struct varpool_node *) (void *) 1)
297
        {
298
          vnode = first_varpool;
299
          first_varpool = (struct varpool_node *)first_varpool->aux;
300
          vnode->aux = NULL;
301
          process_references (&vnode->ref_list, &first, &first_varpool, before_inlining_p);
302
          /* If any function in a comdat group is reachable, force
303
             all other functions in the same comdat group to be
304
             also reachable.  */
305
          if (vnode->same_comdat_group)
306
            {
307
              struct varpool_node *next;
308
              for (next = vnode->same_comdat_group;
309
                   next != vnode;
310
                   next = next->same_comdat_group)
311
                if (!next->needed)
312
                  {
313
                    varpool_mark_needed_node (next);
314
                    enqueue_varpool_node (next, &first_varpool);
315
                  }
316
            }
317
        }
318
    }
319
 
320
  /* Remove unreachable nodes.
321
 
322
     Completely unreachable functions can be fully removed from the callgraph.
     Extern inline functions that we decided not to inline need to become
     unanalyzed nodes of the callgraph (so we still have edges to them); we
     then remove their function bodies.

     We also need to take care of functions that are unreachable but that we
     need to keep around for later cloning.  In this case we also turn them
     into unanalyzed nodes, but keep the body around.  */
329
  for (node = cgraph_nodes; node; node = next)
330
    {
331
      next = node->next;
332
      if (node->aux && !node->reachable)
333
        {
334
          cgraph_node_remove_callees (node);
335
          ipa_remove_all_references (&node->ref_list);
336
          node->analyzed = false;
337
        }
338
      if (!node->aux)
339
        {
340
          struct cgraph_edge *e;
341
          bool found = false;
342
          int i;
343
          struct ipa_ref *ref;
344
 
345
          node->global.inlined_to = NULL;
346
          if (file)
347
            fprintf (file, " %s", cgraph_node_name (node));
348
          /* See if there is a reachable caller.  */
349
          for (e = node->callers; e && !found; e = e->next_caller)
350
            if (e->caller->reachable)
351
              found = true;
352
          for (i = 0; (ipa_ref_list_refering_iterate (&node->ref_list, i, ref)
353
                       && !found); i++)
354
            if (ref->refering_type == IPA_REF_CGRAPH
355
                && ipa_ref_refering_node (ref)->reachable)
356
              found = true;
357
            else if (ref->refering_type == IPA_REF_VARPOOL
358
                     && ipa_ref_refering_varpool_node (ref)->needed)
359
              found = true;
360
 
361
          /* If so, we need to keep node in the callgraph.  */
362
          if (found)
363
            {
364
              if (node->analyzed)
365
                {
366
                  struct cgraph_node *clone;
367
 
368
                  /* If there are still clones, we must keep the body around.
                     Otherwise we can just remove the body but keep the clone.  */
370
                  for (clone = node->clones; clone;
371
                       clone = clone->next_sibling_clone)
372
                    if (clone->aux)
373
                      break;
374
                  if (!clone)
375
                    {
376
                      cgraph_release_function_body (node);
377
                      if (node->prev_sibling_clone)
378
                        node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
379
                      else if (node->clone_of)
380
                        node->clone_of->clones = node->next_sibling_clone;
381
                      if (node->next_sibling_clone)
382
                        node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
383
                      if (node->clone_of)
384
                        node->former_clone_of = node->clone_of->decl;
385
                      node->clone_of = NULL;
386
                      node->next_sibling_clone = NULL;
387
                      node->prev_sibling_clone = NULL;
388
                    }
389
                  else
390
                    gcc_assert (!clone->in_other_partition);
391
                  node->analyzed = false;
392
                  changed = true;
393
                  cgraph_node_remove_callees (node);
394
                  ipa_remove_all_references (&node->ref_list);
395
                }
396
            }
397
          else
398
            {
399
              cgraph_remove_node (node);
400
              changed = true;
401
            }
402
        }
403
    }
404
  for (node = cgraph_nodes; node; node = node->next)
405
    {
406
      /* Inline clones might be kept around so that materializing them allows
         further cloning.  If the function the clone is inlined into is
         removed, we need to turn it into a normal clone.  */
409
      if (node->global.inlined_to
410
          && !node->callers)
411
        {
412
          gcc_assert (node->clones);
413
          node->global.inlined_to = NULL;
414
          update_inlined_to_pointer (node, node);
415
        }
416
      node->aux = NULL;
417
    }
418
 
419
  if (file)
420
    fprintf (file, "\n");
421
 
422
  /* We must release unused extern inlines or sanity checking will fail.
     The rest of the transformations are undesirable at -O0 since we do not
     want to remove anything.  */
424
  if (!optimize)
425
    return changed;
426
 
427
  if (file)
428
    fprintf (file, "Reclaiming variables:");
429
  for (vnode = varpool_nodes; vnode; vnode = vnext)
430
    {
431
      vnext = vnode->next;
432
      if (!vnode->needed)
433
        {
434
          if (file)
435
            fprintf (file, " %s", varpool_node_name (vnode));
436
          varpool_remove_node (vnode);
437
          changed = true;
438
        }
439
    }
440
 
441
  /* Now update address_taken flags and try to promote functions to be local.  */
442
 
443
  if (file)
444
    fprintf (file, "\nClearing address taken flags:");
445
  for (node = cgraph_nodes; node; node = node->next)
446
    if (node->address_taken
447
        && !node->reachable_from_other_partition)
448
      {
449
        if (!cgraph_for_node_and_aliases (node, has_addr_references_p, NULL, true))
450
          {
451
            if (file)
452
              fprintf (file, " %s", cgraph_node_name (node));
453
            node->address_taken = false;
454
            changed = true;
455
            if (cgraph_local_node_p (node))
456
              {
457
                node->local.local = true;
458
                if (file)
459
                  fprintf (file, " (local)");
460
              }
461
          }
462
      }
463
  if (file)
464
    fprintf (file, "\n");
465
 
466
#ifdef ENABLE_CHECKING
467
  verify_cgraph ();
468
#endif
469
 
470
  /* Reclaim alias pairs for functions that have disappeared from the
471
     call graph.  */
472
  remove_unreachable_alias_pairs ();
473
 
474
  return changed;
475
}
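
/* In outline, cgraph_remove_unreachable_nodes proceeds in four steps:
   (1) seed the worklists with functions and variables that are obviously
   needed, (2) propagate reachability across call edges, IPA references and
   comdat groups until both worklists are empty, (3) remove unreachable nodes
   or strip their bodies while keeping what later cloning still needs, and
   (4) clear address_taken flags that no IPA_REF_ADDR reference justifies any
   more and promote eligible functions to local.  */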
476
 
477
/* Discover variables that no longer have their address taken or that are
   read-only and update their flags.

   FIXME: This cannot be done between gimplify and omp_expand since the
   readonly flag plays a role in what is shared and what is not.  Currently we
   do this transformation as part of whole program visibility and re-do it in
   the ipa-reference pass (to take cloning into account), but it would make
   sense to do it before early optimizations.  */
485
 
486
void
487
ipa_discover_readonly_nonaddressable_vars (void)
488
{
489
  struct varpool_node *vnode;
490
  if (dump_file)
491
    fprintf (dump_file, "Clearing variable flags:");
492
  for (vnode = varpool_nodes; vnode; vnode = vnode->next)
493
    if (vnode->finalized && varpool_all_refs_explicit_p (vnode)
494
        && (TREE_ADDRESSABLE (vnode->decl) || !TREE_READONLY (vnode->decl)))
495
      {
496
        bool written = false;
497
        bool address_taken = false;
498
        int i;
499
        struct ipa_ref *ref;
500
        for (i = 0; ipa_ref_list_refering_iterate (&vnode->ref_list, i, ref)
501
                    && (!written || !address_taken); i++)
502
          switch (ref->use)
503
            {
504
            case IPA_REF_ADDR:
505
              address_taken = true;
506
              break;
507
            case IPA_REF_LOAD:
508
              break;
509
            case IPA_REF_STORE:
510
              written = true;
511
              break;
512
            }
513
        if (TREE_ADDRESSABLE (vnode->decl) && !address_taken)
514
          {
515
            if (dump_file)
516
              fprintf (dump_file, " %s (addressable)", varpool_node_name (vnode));
517
            TREE_ADDRESSABLE (vnode->decl) = 0;
518
          }
519
        if (!TREE_READONLY (vnode->decl) && !address_taken && !written
520
            /* Making a variable in an explicit section read-only can cause a
               section type conflict.
               See e.g. gcc.c-torture/compile/pr23237.c.  */
523
            && DECL_SECTION_NAME (vnode->decl) == NULL)
524
          {
525
            if (dump_file)
526
              fprintf (dump_file, " %s (read-only)", varpool_node_name (vnode));
527
            TREE_READONLY (vnode->decl) = 1;
528
          }
529
      }
530
  if (dump_file)
531
    fprintf (dump_file, "\n");
532
}
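
/* For example, assuming a finalized variable whose references are all
   explicit, a static variable that is only ever read (every reference is an
   IPA_REF_LOAD) has TREE_ADDRESSABLE cleared here if it was set and, unless
   the variable was placed in an explicit section, TREE_READONLY set as
   well.  */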
533
 
534
/* Return true when there is an address reference to NODE that does not come
   from a vtable.  */
535
static bool
536
cgraph_address_taken_from_non_vtable_p (struct cgraph_node *node)
537
{
538
  int i;
539
  struct ipa_ref *ref;
540
  for (i = 0; ipa_ref_list_refering_iterate (&node->ref_list, i, ref); i++)
541
    if (ref->use == IPA_REF_ADDR)
542
      {
543
        struct varpool_node *node;
544
        if (ref->refering_type == IPA_REF_CGRAPH)
545
          return true;
546
        node = ipa_ref_refering_varpool_node (ref);
547
        if (!DECL_VIRTUAL_P (node->decl))
548
          return true;
549
      }
550
  return false;
551
}
552
 
553
/* COMDAT functions must be shared only if they have their address taken,
   otherwise we can produce our own private implementation with
   -fwhole-program.
   Return true when turning a COMDAT function static cannot lead to wrong
   code when the resulting object links with a library defining the same
   COMDAT.
558
 
559
   Virtual functions do have their addresses taken from the vtables,
560
   but in C++ there is no way to compare their addresses for equality.  */
561
 
562
bool
563
cgraph_comdat_can_be_unshared_p (struct cgraph_node *node)
564
{
565
  if ((cgraph_address_taken_from_non_vtable_p (node)
566
       && !DECL_VIRTUAL_P (node->decl))
567
      || !node->analyzed)
568
    return false;
569
  if (node->same_comdat_group)
570
    {
571
      struct cgraph_node *next;
572
 
573
      /* If more than one function is in the same COMDAT group, it must
         be shared even if just one function in the comdat group has its
         address taken.  */
576
      for (next = node->same_comdat_group;
577
           next != node; next = next->same_comdat_group)
578
        if (cgraph_address_taken_from_non_vtable_p (next)
579
            && !DECL_VIRTUAL_P (next->decl))
580
          return false;
581
    }
582
  return true;
583
}
584
 
585
/* Return true when function NODE should be considered externally visible.  */
586
 
587
static bool
588
cgraph_externally_visible_p (struct cgraph_node *node,
589
                             bool whole_program, bool aliased)
590
{
591
  if (!node->local.finalized)
592
    return false;
593
  if (!DECL_COMDAT (node->decl)
594
      && (!TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl)))
595
    return false;
596
 
597
  /* Do not even try to be smart about aliased nodes.  Until we properly
598
     represent everything by same body alias, these are just evil.  */
599
  if (aliased)
600
    return true;
601
 
602
  /* Do not try to localize built-in functions yet.  One of the problems is
     that we end up mangling their asm for WHOPR, which makes it impossible to
     call them using the implicit built-in declarations anymore.  Similarly
     this would enable us to remove them as unreachable before actual calls
     may appear during expansion or folding.  */
607
  if (DECL_BUILT_IN (node->decl))
608
    return true;
609
 
610
  /* If linker counts on us, we must preserve the function.  */
611
  if (cgraph_used_from_object_file_p (node))
612
    return true;
613
  if (DECL_PRESERVE_P (node->decl))
614
    return true;
615
  if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (node->decl)))
616
    return true;
617
  if (TARGET_DLLIMPORT_DECL_ATTRIBUTES
618
      && lookup_attribute ("dllexport", DECL_ATTRIBUTES (node->decl)))
619
    return true;
620
  if (node->resolution == LDPR_PREVAILING_DEF_IRONLY)
621
    return false;
622
  /* When doing LTO or whole program, we can make COMDAT functions static.
     This improves code quality and we know we will duplicate them at most
     twice (in the case that we are not using the plugin and link with an
     object file implementing the same COMDAT).  */
626
  if ((in_lto_p || whole_program)
627
      && DECL_COMDAT (node->decl)
628
      && cgraph_comdat_can_be_unshared_p (node))
629
    return false;
630
 
631
  /* When doing link time optimizations, hidden symbols become local.  */
632
  if (in_lto_p
633
      && (DECL_VISIBILITY (node->decl) == VISIBILITY_HIDDEN
634
          || DECL_VISIBILITY (node->decl) == VISIBILITY_INTERNAL)
635
      /* Be sure that node is defined in IR file, not in other object
636
         file.  In that case we don't set used_from_other_object_file.  */
637
      && node->analyzed)
638
    ;
639
  else if (!whole_program)
640
    return true;
641
 
642
  if (MAIN_NAME_P (DECL_NAME (node->decl)))
643
    return true;
644
 
645
  return false;
646
}
647
 
648
/* Return true when variable VNODE should be considered externally visible.  */
649
 
650
bool
651
varpool_externally_visible_p (struct varpool_node *vnode, bool aliased)
652
{
653
  if (!DECL_COMDAT (vnode->decl) && !TREE_PUBLIC (vnode->decl))
654
    return false;
655
 
656
  /* Do not even try to be smart about aliased nodes.  Until we properly
657
     represent everything by same body alias, these are just evil.  */
658
  if (aliased)
659
    return true;
660
 
661
  /* If linker counts on us, we must preserve the function.  */
662
  if (varpool_used_from_object_file_p (vnode))
663
    return true;
664
 
665
  if (DECL_HARD_REGISTER (vnode->decl))
666
    return true;
667
  if (DECL_PRESERVE_P (vnode->decl))
668
    return true;
669
  if (lookup_attribute ("externally_visible",
670
                        DECL_ATTRIBUTES (vnode->decl)))
671
    return true;
672
  if (TARGET_DLLIMPORT_DECL_ATTRIBUTES
673
      && lookup_attribute ("dllexport",
674
                           DECL_ATTRIBUTES (vnode->decl)))
675
    return true;
676
 
677
  /* See if we have linker information about the symbol not being used, or
     whether we need to make a guess based on the declaration.

     Even if the linker claims the symbol is unused, never bring internal
     symbols that are declared by the user as used or externally visible.
     This is needed e.g. for references from asm statements.  */
683
  if (varpool_used_from_object_file_p (vnode))
684
    return true;
685
  if (vnode->resolution == LDPR_PREVAILING_DEF_IRONLY)
686
    return false;
687
 
688
  /* As a special case, the COMDAT virtual tables can be unshared.
     In LTO mode turn vtables into static variables.  The variable is
     read-only, so this does not enable more optimization, but referring to a
     static variable is faster for dynamic linking.  Also this matches the
     logic hiding vtables from LTO symbol tables.  */
693
  if ((in_lto_p || flag_whole_program)
694
      && !vnode->force_output
695
      && DECL_COMDAT (vnode->decl) && DECL_VIRTUAL_P (vnode->decl))
696
    return false;
697
 
698
  /* When doing link time optimizations, hidden symbols become local.  */
699
  if (in_lto_p
700
      && (DECL_VISIBILITY (vnode->decl) == VISIBILITY_HIDDEN
701
          || DECL_VISIBILITY (vnode->decl) == VISIBILITY_INTERNAL)
702
      /* Be sure that node is defined in IR file, not in other object
703
         file.  In that case we don't set used_from_other_object_file.  */
704
      && vnode->finalized)
705
    ;
706
  else if (!flag_whole_program)
707
    return true;
708
 
709
  /* Do not attempt to privatize COMDATS by default.
710
     This would break linking with C++ libraries sharing
711
     inline definitions.
712
 
713
     FIXME: We can do so for readonly vars with no address taken and
714
     possibly also for vtables since no direct pointer comparison is done.
715
     It might be interesting to do so to reduce linking overhead.  */
716
  if (DECL_COMDAT (vnode->decl) || DECL_WEAK (vnode->decl))
717
    return true;
718
  return false;
719
}
720
 
721
/* Dissolve the same_comdat_group list in which NODE resides.  */
722
 
723
static void
724
dissolve_same_comdat_group_list (struct cgraph_node *node)
725
{
726
  struct cgraph_node *n = node, *next;
727
  do
728
    {
729
      next = n->same_comdat_group;
730
      n->same_comdat_group = NULL;
731
      n = next;
732
    }
733
  while (n != node);
734
}
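
/* The same_comdat_group links form a circular list, which is why the loop
   above clears each link in turn and stops once it wraps back around to the
   node it started from.  */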
735
 
736
/* Mark visibility of all functions.
737
 
738
   A local function is one whose calls can occur only in the current
739
   compilation unit and all its calls are explicit, so we can change
740
   its calling convention.  We simply mark all static functions whose
741
   address is not taken as local.
742
 
743
   We also change the TREE_PUBLIC flag of all declarations that are public
   from the language point of view, but for which we want to override this
   default via visibilities from the backend's point of view.  */
746
 
747
static unsigned int
748
function_and_variable_visibility (bool whole_program)
749
{
750
  struct cgraph_node *node;
751
  struct varpool_node *vnode;
752
  struct pointer_set_t *aliased_nodes = pointer_set_create ();
753
  struct pointer_set_t *aliased_vnodes = pointer_set_create ();
754
  unsigned i;
755
  alias_pair *p;
756
 
757
  /* Discover aliased nodes.  */
758
  FOR_EACH_VEC_ELT (alias_pair, alias_pairs, i, p)
759
    {
760
      if (dump_file)
761
       fprintf (dump_file, "Alias %s->%s",
762
                IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (p->decl)),
763
                IDENTIFIER_POINTER (p->target));
764
 
765
      if ((node = cgraph_node_for_asm (p->target)) != NULL
766
          && !DECL_EXTERNAL (node->decl))
767
        {
768
          if (!node->analyzed)
769
            continue;
770
          cgraph_mark_needed_node (node);
771
          gcc_assert (node->needed);
772
          pointer_set_insert (aliased_nodes, node);
773
          if (dump_file)
774
            fprintf (dump_file, "  node %s/%i",
775
                     cgraph_node_name (node), node->uid);
776
        }
777
      else if ((vnode = varpool_node_for_asm (p->target)) != NULL
778
               && !DECL_EXTERNAL (vnode->decl))
779
        {
780
          varpool_mark_needed_node (vnode);
781
          gcc_assert (vnode->needed);
782
          pointer_set_insert (aliased_vnodes, vnode);
783
          if (dump_file)
784
            fprintf (dump_file, "  varpool node %s",
785
                     varpool_node_name (vnode));
786
        }
787
      if (dump_file)
788
       fprintf (dump_file, "\n");
789
    }
790
 
791
  for (node = cgraph_nodes; node; node = node->next)
792
    {
793
      int flags = flags_from_decl_or_type (node->decl);
794
 
795
      /* Optimize away PURE and CONST constructors and destructors.  */
796
      if (optimize
797
          && (flags & (ECF_CONST | ECF_PURE))
798
          && !(flags & ECF_LOOPING_CONST_OR_PURE))
799
        {
800
          DECL_STATIC_CONSTRUCTOR (node->decl) = 0;
801
          DECL_STATIC_DESTRUCTOR (node->decl) = 0;
802
        }
803
 
804
      /* Frontends and alias code mark nodes as needed before parsing is
         finished.  We may end up marking as needed external nodes, where this
         flag is meaningless; strip it.  */
807
      if (node->needed
808
          && (DECL_EXTERNAL (node->decl) || !node->analyzed))
809
        node->needed = 0;
810
 
811
      /* The C++ FE, lacking COMDAT support, creates local COMDAT functions
         (that ought to be shared but cannot be due to object format
         limitations).  It is necessary to keep the flag to keep the rest of
         the C++ FE happy.  Clear the flag here to avoid confusion in the
         middle-end.  */
815
      if (DECL_COMDAT (node->decl) && !TREE_PUBLIC (node->decl))
816
        DECL_COMDAT (node->decl) = 0;
817
      /* For external decls stop tracking same_comdat_group, it doesn't matter
818
         what comdat group they are in when they won't be emitted in this TU,
819
         and simplifies later passes.  */
820
      if (node->same_comdat_group && DECL_EXTERNAL (node->decl))
821
        {
822
#ifdef ENABLE_CHECKING
823
          struct cgraph_node *n;
824
 
825
          for (n = node->same_comdat_group;
826
               n != node;
827
               n = n->same_comdat_group)
828
              /* If at least one of same comdat group functions is external,
829
                 all of them have to be, otherwise it is a front-end bug.  */
830
              gcc_assert (DECL_EXTERNAL (n->decl));
831
#endif
832
          dissolve_same_comdat_group_list (node);
833
        }
834
      gcc_assert ((!DECL_WEAK (node->decl) && !DECL_COMDAT (node->decl))
835
                  || TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl));
836
      if (cgraph_externally_visible_p (node, whole_program,
837
                                       pointer_set_contains (aliased_nodes,
838
                                                             node)))
839
        {
840
          gcc_assert (!node->global.inlined_to);
841
          node->local.externally_visible = true;
842
        }
843
      else
844
        node->local.externally_visible = false;
845
      if (!node->local.externally_visible && node->analyzed
846
          && !DECL_EXTERNAL (node->decl))
847
        {
848
          gcc_assert (whole_program || in_lto_p || !TREE_PUBLIC (node->decl));
849
          cgraph_make_decl_local (node->decl);
850
          node->resolution = LDPR_PREVAILING_DEF_IRONLY;
851
          if (node->same_comdat_group)
852
            /* cgraph_externally_visible_p has already checked all other nodes
853
               in the group and they will all be made local.  We need to
854
               dissolve the group at once so that the predicate does not
855
               segfault though. */
856
            dissolve_same_comdat_group_list (node);
857
        }
858
 
859
      if (node->thunk.thunk_p
860
          && TREE_PUBLIC (node->decl))
861
        {
862
          struct cgraph_node *decl_node = node;
863
 
864
          decl_node = cgraph_function_node (decl_node->callees->callee, NULL);
865
 
866
          /* Thunks have the same visibility as the function they are attached
             to.  Make sure the C++ front end set this up properly.  */
868
          if (DECL_ONE_ONLY (decl_node->decl))
869
            {
870
              gcc_checking_assert (DECL_COMDAT (node->decl)
871
                                   == DECL_COMDAT (decl_node->decl));
872
              gcc_checking_assert (DECL_COMDAT_GROUP (node->decl)
873
                                   == DECL_COMDAT_GROUP (decl_node->decl));
874
              gcc_checking_assert (node->same_comdat_group);
875
            }
876
          if (DECL_EXTERNAL (decl_node->decl))
877
            DECL_EXTERNAL (node->decl) = 1;
878
        }
879
    }
880
  for (node = cgraph_nodes; node; node = node->next)
881
    node->local.local = cgraph_local_node_p (node);
882
  for (vnode = varpool_nodes; vnode; vnode = vnode->next)
883
    {
884
      /* weak flag makes no sense on local variables.  */
885
      gcc_assert (!DECL_WEAK (vnode->decl)
886
                  || TREE_PUBLIC (vnode->decl) || DECL_EXTERNAL (vnode->decl));
887
      /* In several cases declarations cannot be common:

         - when the declaration has an initializer
         - when it is weak
         - when it has a specific section
         - when it resides in a non-generic address space.
         - if the declaration is local, it will get into the .local common
           section so the common flag is not needed.  Frontends still produce
           these in certain cases, such as for:
896
 
897
             static int a __attribute__ ((common))
898
 
899
         Canonicalize things here and clear the redundant flag.  */
900
      if (DECL_COMMON (vnode->decl)
901
          && (!(TREE_PUBLIC (vnode->decl) || DECL_EXTERNAL (vnode->decl))
902
              || (DECL_INITIAL (vnode->decl)
903
                  && DECL_INITIAL (vnode->decl) != error_mark_node)
904
              || DECL_WEAK (vnode->decl)
905
              || DECL_SECTION_NAME (vnode->decl) != NULL
906
              || ! (ADDR_SPACE_GENERIC_P
907
                    (TYPE_ADDR_SPACE (TREE_TYPE (vnode->decl))))))
908
        DECL_COMMON (vnode->decl) = 0;
909
    }
910
  for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
911
    {
912
      if (!vnode->finalized)
913
        continue;
914
      if (vnode->needed
915
          && varpool_externally_visible_p
916
              (vnode,
917
               pointer_set_contains (aliased_vnodes, vnode)))
918
        vnode->externally_visible = true;
919
      else
920
        vnode->externally_visible = false;
921
      if (!vnode->externally_visible)
922
        {
923
          gcc_assert (in_lto_p || whole_program || !TREE_PUBLIC (vnode->decl));
924
          cgraph_make_decl_local (vnode->decl);
925
          vnode->resolution = LDPR_PREVAILING_DEF_IRONLY;
926
        }
927
     gcc_assert (TREE_STATIC (vnode->decl));
928
    }
929
  pointer_set_destroy (aliased_nodes);
930
  pointer_set_destroy (aliased_vnodes);
931
 
932
  if (dump_file)
933
    {
934
      fprintf (dump_file, "\nMarking local functions:");
935
      for (node = cgraph_nodes; node; node = node->next)
936
        if (node->local.local)
937
          fprintf (dump_file, " %s", cgraph_node_name (node));
938
      fprintf (dump_file, "\n\n");
939
      fprintf (dump_file, "\nMarking externally visible functions:");
940
      for (node = cgraph_nodes; node; node = node->next)
941
        if (node->local.externally_visible)
942
          fprintf (dump_file, " %s", cgraph_node_name (node));
943
      fprintf (dump_file, "\n\n");
944
      fprintf (dump_file, "\nMarking externally visible variables:");
945
      for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
946
        if (vnode->externally_visible)
947
          fprintf (dump_file, " %s", varpool_node_name (vnode));
948
      fprintf (dump_file, "\n\n");
949
    }
950
  cgraph_function_flags_ready = true;
951
  return 0;
952
}
953
 
954
/* Local function pass handling visibilities.  This happens before LTO streaming
955
   so in particular -fwhole-program should be ignored at this level.  */
956
 
957
static unsigned int
958
local_function_and_variable_visibility (void)
959
{
960
  return function_and_variable_visibility (flag_whole_program && !flag_lto);
961
}
962
 
963
struct simple_ipa_opt_pass pass_ipa_function_and_variable_visibility =
964
{
965
 {
966
  SIMPLE_IPA_PASS,
967
  "visibility",                         /* name */
968
  NULL,                                 /* gate */
969
  local_function_and_variable_visibility,/* execute */
970
  NULL,                                 /* sub */
971
  NULL,                                 /* next */
972
  0,                                     /* static_pass_number */
973
  TV_CGRAPHOPT,                         /* tv_id */
974
  0,                                     /* properties_required */
975
  0,                                     /* properties_provided */
976
  0,                                     /* properties_destroyed */
977
  0,                                     /* todo_flags_start */
978
  TODO_remove_functions | TODO_dump_cgraph
979
  | TODO_ggc_collect                    /* todo_flags_finish */
980
 }
981
};
982
 
983
/* Do not re-run at the ltrans stage.  */
984
 
985
static bool
986
gate_whole_program_function_and_variable_visibility (void)
987
{
988
  return !flag_ltrans;
989
}
990
 
991
/* Bring functions local at LTO time with -fwhole-program.  */
992
 
993
static unsigned int
994
whole_program_function_and_variable_visibility (void)
995
{
996
  struct cgraph_node *node;
997
  struct varpool_node *vnode;
998
 
999
  function_and_variable_visibility (flag_whole_program);
1000
 
1001
  for (node = cgraph_nodes; node; node = node->next)
1002
    if ((node->local.externally_visible && !DECL_COMDAT (node->decl))
1003
        && node->local.finalized)
1004
      cgraph_mark_needed_node (node);
1005
  for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
1006
    if (vnode->externally_visible && !DECL_COMDAT (vnode->decl))
1007
      varpool_mark_needed_node (vnode);
1008
  if (dump_file)
1009
    {
1010
      fprintf (dump_file, "\nNeeded variables:");
1011
      for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
1012
        if (vnode->needed)
1013
          fprintf (dump_file, " %s", varpool_node_name (vnode));
1014
      fprintf (dump_file, "\n\n");
1015
    }
1016
  if (optimize)
1017
    ipa_discover_readonly_nonaddressable_vars ();
1018
  return 0;
1019
}
1020
 
1021
struct ipa_opt_pass_d pass_ipa_whole_program_visibility =
1022
{
1023
 {
1024
  IPA_PASS,
1025
  "whole-program",                      /* name */
1026
  gate_whole_program_function_and_variable_visibility,/* gate */
1027
  whole_program_function_and_variable_visibility,/* execute */
1028
  NULL,                                 /* sub */
1029
  NULL,                                 /* next */
1030
  0,                                     /* static_pass_number */
1031
  TV_CGRAPHOPT,                         /* tv_id */
1032
  0,                                     /* properties_required */
1033
  0,                                     /* properties_provided */
1034
  0,                                     /* properties_destroyed */
1035
  0,                                     /* todo_flags_start */
1036
  TODO_remove_functions | TODO_dump_cgraph
1037
  | TODO_ggc_collect                    /* todo_flags_finish */
1038
 },
1039
 NULL,                                  /* generate_summary */
1040
 NULL,                                  /* write_summary */
1041
 NULL,                                  /* read_summary */
1042
 NULL,                                  /* write_optimization_summary */
1043
 NULL,                                  /* read_optimization_summary */
1044
 NULL,                                  /* stmt_fixup */
1045
 0,                                      /* TODOs */
1046
 NULL,                                  /* function_transform */
1047
 NULL,                                  /* variable_transform */
1048
};
1049
 
1050
 
1051
/* Simple ipa profile pass propagating frequencies across the callgraph.  */
1052
 
1053
static unsigned int
1054
ipa_profile (void)
1055
{
1056
  struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1057
  struct cgraph_edge *e;
1058
  int order_pos;
1059
  bool something_changed = false;
1060
  int i;
1061
 
1062
  order_pos = ipa_reverse_postorder (order);
1063
  for (i = order_pos - 1; i >= 0; i--)
1064
    {
1065
      if (order[i]->local.local && cgraph_propagate_frequency (order[i]))
1066
        {
1067
          for (e = order[i]->callees; e; e = e->next_callee)
1068
            if (e->callee->local.local && !e->callee->aux)
1069
              {
1070
                something_changed = true;
1071
                e->callee->aux = (void *)1;
1072
              }
1073
        }
1074
      order[i]->aux = NULL;
1075
    }
1076
 
1077
  while (something_changed)
1078
    {
1079
      something_changed = false;
1080
      for (i = order_pos - 1; i >= 0; i--)
1081
        {
1082
          if (order[i]->aux && cgraph_propagate_frequency (order[i]))
1083
            {
1084
              for (e = order[i]->callees; e; e = e->next_callee)
1085
                if (e->callee->local.local && !e->callee->aux)
1086
                  {
1087
                    something_changed = true;
1088
                    e->callee->aux = (void *)1;
1089
                  }
1090
            }
1091
          order[i]->aux = NULL;
1092
        }
1093
    }
1094
  free (order);
1095
  return 0;
1096
}
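
/* The propagation above works in two stages: the first walk over the order
   computed by ipa_reverse_postorder calls cgraph_propagate_frequency on each
   local node and, whenever a change is reported, queues the node's local
   callees by setting their AUX pointers; the second stage then re-runs the
   propagation only on the queued nodes, repeating until no further change is
   recorded.  */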
1097
 
1098
static bool
1099
gate_ipa_profile (void)
1100
{
1101
  return flag_ipa_profile;
1102
}
1103
 
1104
struct ipa_opt_pass_d pass_ipa_profile =
1105
{
1106
 {
1107
  IPA_PASS,
1108
  "profile_estimate",                   /* name */
1109
  gate_ipa_profile,                     /* gate */
1110
  ipa_profile,                          /* execute */
1111
  NULL,                                 /* sub */
1112
  NULL,                                 /* next */
1113
  0,                                     /* static_pass_number */
1114
  TV_IPA_PROFILE,                       /* tv_id */
1115
  0,                                     /* properties_required */
1116
  0,                                     /* properties_provided */
1117
  0,                                     /* properties_destroyed */
1118
  0,                                     /* todo_flags_start */
1119
 
1120
 },
1121
 NULL,                                  /* generate_summary */
1122
 NULL,                                  /* write_summary */
1123
 NULL,                                  /* read_summary */
1124
 NULL,                                  /* write_optimization_summary */
1125
 NULL,                                  /* read_optimization_summary */
1126
 NULL,                                  /* stmt_fixup */
1127
 0,                                      /* TODOs */
1128
 NULL,                                  /* function_transform */
1129
 NULL                                   /* variable_transform */
1130
};
1131
 
1132
/* Generate and emit a static constructor or destructor.  WHICH must
1133
   be one of 'I' (for a constructor) or 'D' (for a destructor).  BODY
1134
   is a STATEMENT_LIST containing GENERIC statements.  PRIORITY is the
1135
   initialization priority for this constructor or destructor.
1136
 
1137
   FINAL specifies whether the externally visible name for collect2 should
   be produced.  */
1139
 
1140
static void
1141
cgraph_build_static_cdtor_1 (char which, tree body, int priority, bool final)
1142
{
1143
  static int counter = 0;
1144
  char which_buf[16];
1145
  tree decl, name, resdecl;
1146
 
1147
  /* The priority is encoded in the constructor or destructor name.
1148
     collect2 will sort the names and arrange that they are called at
1149
     program startup.  */
1150
  if (final)
1151
    sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
1152
  else
1153
  /* Produce a sane name, but one not recognizable by collect2, just in
     case we fail to inline the function.  */
1155
    sprintf (which_buf, "sub_%c_%.5d_%d", which, priority, counter++);
1156
  name = get_file_function_name (which_buf);
1157
 
1158
  decl = build_decl (input_location, FUNCTION_DECL, name,
1159
                     build_function_type_list (void_type_node, NULL_TREE));
1160
  current_function_decl = decl;
1161
 
1162
  resdecl = build_decl (input_location,
1163
                        RESULT_DECL, NULL_TREE, void_type_node);
1164
  DECL_ARTIFICIAL (resdecl) = 1;
1165
  DECL_RESULT (decl) = resdecl;
1166
  DECL_CONTEXT (resdecl) = decl;
1167
 
1168
  allocate_struct_function (decl, false);
1169
 
1170
  TREE_STATIC (decl) = 1;
1171
  TREE_USED (decl) = 1;
1172
  DECL_ARTIFICIAL (decl) = 1;
1173
  DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
1174
  DECL_SAVED_TREE (decl) = body;
1175
  if (!targetm.have_ctors_dtors && final)
1176
    {
1177
      TREE_PUBLIC (decl) = 1;
1178
      DECL_PRESERVE_P (decl) = 1;
1179
    }
1180
  DECL_UNINLINABLE (decl) = 1;
1181
 
1182
  DECL_INITIAL (decl) = make_node (BLOCK);
1183
  TREE_USED (DECL_INITIAL (decl)) = 1;
1184
 
1185
  DECL_SOURCE_LOCATION (decl) = input_location;
1186
  cfun->function_end_locus = input_location;
1187
 
1188
  switch (which)
1189
    {
1190
    case 'I':
1191
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
1192
      decl_init_priority_insert (decl, priority);
1193
      break;
1194
    case 'D':
1195
      DECL_STATIC_DESTRUCTOR (decl) = 1;
1196
      decl_fini_priority_insert (decl, priority);
1197
      break;
1198
    default:
1199
      gcc_unreachable ();
1200
    }
1201
 
1202
  gimplify_function_tree (decl);
1203
 
1204
  cgraph_add_new_function (decl, false);
1205
 
1206
  set_cfun (NULL);
1207
  current_function_decl = NULL;
1208
}
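
/* As a concrete illustration of the naming scheme above: for WHICH == 'I',
   PRIORITY == 65535 and the first invocation (counter == 0), the buffer
   passed to get_file_function_name is "I_65535_0" when FINAL is true and
   "sub_I_65535_0" otherwise.  */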
1209
 
1210
/* Generate and emit a static constructor or destructor.  WHICH must
1211
   be one of 'I' (for a constructor) or 'D' (for a destructor).  BODY
1212
   is a STATEMENT_LIST containing GENERIC statements.  PRIORITY is the
1213
   initialization priority for this constructor or destructor.  */
1214
 
1215
void
1216
cgraph_build_static_cdtor (char which, tree body, int priority)
1217
{
1218
  cgraph_build_static_cdtor_1 (which, body, priority, false);
1219
}
1220
 
1221
/* A vector of FUNCTION_DECLs declared as static constructors.  */
1222
static VEC(tree, heap) *static_ctors;
1223
/* A vector of FUNCTION_DECLs declared as static destructors.  */
1224
static VEC(tree, heap) *static_dtors;
1225
 
1226
/* When the target does not have ctors and dtors, we call all constructors
   and destructors through a special initialization/destruction function
   recognized by collect2.
1229
 
1230
   When we are going to build this function, collect all constructors and
1231
   destructors and turn them into normal functions.  */
1232
 
1233
static void
1234
record_cdtor_fn (struct cgraph_node *node)
1235
{
1236
  if (DECL_STATIC_CONSTRUCTOR (node->decl))
1237
    VEC_safe_push (tree, heap, static_ctors, node->decl);
1238
  if (DECL_STATIC_DESTRUCTOR (node->decl))
1239
    VEC_safe_push (tree, heap, static_dtors, node->decl);
1240
  node = cgraph_get_node (node->decl);
1241
  DECL_DISREGARD_INLINE_LIMITS (node->decl) = 1;
1242
}
1243
 
1244
/* Define global constructor/destructor functions for the CDTORS, of
   which there are LEN.  The CDTORS are sorted by initialization
1246
   priority.  If CTOR_P is true, these are constructors; otherwise,
1247
   they are destructors.  */
1248
 
1249
static void
1250
build_cdtor (bool ctor_p, VEC (tree, heap) *cdtors)
1251
{
1252
  size_t i,j;
1253
  size_t len = VEC_length (tree, cdtors);
1254
 
1255
  i = 0;
1256
  while (i < len)
1257
    {
1258
      tree body;
1259
      tree fn;
1260
      priority_type priority;
1261
 
1262
      priority = 0;
1263
      body = NULL_TREE;
1264
      j = i;
1265
      do
1266
        {
1267
          priority_type p;
1268
          fn = VEC_index (tree, cdtors, j);
1269
          p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
1270
          if (j == i)
1271
            priority = p;
1272
          else if (p != priority)
1273
            break;
1274
          j++;
1275
        }
1276
      while (j < len);
1277
 
1278
      /* When there is only one cdtor and the target supports them, do nothing.  */
1279
      if (j == i + 1
1280
          && targetm.have_ctors_dtors)
1281
        {
1282
          i++;
1283
          continue;
1284
        }
1285
      /* Emit the calls for this batch of constructors/destructors, which all
         share the same initialization priority.  */
1287
      for (;i < j; i++)
1288
        {
1289
          tree call;
1290
          fn = VEC_index (tree, cdtors, i);
1291
          call = build_call_expr (fn, 0);
1292
          if (ctor_p)
1293
            DECL_STATIC_CONSTRUCTOR (fn) = 0;
1294
          else
1295
            DECL_STATIC_DESTRUCTOR (fn) = 0;
1296
          /* We do not want to optimize away pure/const calls here.
1297
             When optimizing, these should be already removed, when not
1298
             optimizing, we want user to be able to breakpoint in them.  */
1299
          TREE_SIDE_EFFECTS (call) = 1;
1300
          append_to_statement_list (call, &body);
1301
        }
1302
      gcc_assert (body != NULL_TREE);
1303
      /* Generate a function to call all the functions of like
         priority.  */
1305
      cgraph_build_static_cdtor_1 (ctor_p ? 'I' : 'D', body, priority, true);
1306
    }
1307
}
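
/* A hypothetical example of the batching above: with constructors sorted as
   ctor_a (priority 100), ctor_b (priority 100) and ctor_c (priority 65535),
   and a target without native ctor support, the loop emits one 'I' wrapper
   of priority 100 calling ctor_a and ctor_b and a second one of priority
   65535 calling ctor_c.  A single-element batch is left untouched when the
   target does support ctors/dtors natively.  */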
1308
 
1309
/* Comparison function for qsort.  P1 and P2 are actually of type
1310
   "tree *" and point to static constructors.  DECL_INIT_PRIORITY is
1311
   used to determine the sort order.  */
1312
 
1313
static int
1314
compare_ctor (const void *p1, const void *p2)
1315
{
1316
  tree f1;
1317
  tree f2;
1318
  int priority1;
1319
  int priority2;
1320
 
1321
  f1 = *(const tree *)p1;
1322
  f2 = *(const tree *)p2;
1323
  priority1 = DECL_INIT_PRIORITY (f1);
1324
  priority2 = DECL_INIT_PRIORITY (f2);
1325
 
1326
  if (priority1 < priority2)
1327
    return -1;
1328
  else if (priority1 > priority2)
1329
    return 1;
1330
  else
1331
    /* Ensure a stable sort.  Constructors are executed in backward
       order to make LTO initialize libraries first.  */
1333
    return DECL_UID (f2) - DECL_UID (f1);
1334
}
1335
 
1336
/* Comparison function for qsort.  P1 and P2 are actually of type
1337
   "tree *" and point to static destructors.  DECL_FINI_PRIORITY is
1338
   used to determine the sort order.  */
1339
 
1340
static int
1341
compare_dtor (const void *p1, const void *p2)
1342
{
1343
  tree f1;
1344
  tree f2;
1345
  int priority1;
1346
  int priority2;
1347
 
1348
  f1 = *(const tree *)p1;
1349
  f2 = *(const tree *)p2;
1350
  priority1 = DECL_FINI_PRIORITY (f1);
1351
  priority2 = DECL_FINI_PRIORITY (f2);
1352
 
1353
  if (priority1 < priority2)
1354
    return -1;
1355
  else if (priority1 > priority2)
1356
    return 1;
1357
  else
1358
    /* Ensure a stable sort.  */
1359
    return DECL_UID (f1) - DECL_UID (f2);
1360
}
1361
 
1362
/* Generate functions to call static constructors and destructors
1363
   for targets that do not support .ctors/.dtors sections.  These
1364
   functions have magic names which are detected by collect2.  */
1365
 
1366
static void
1367
build_cdtor_fns (void)
1368
{
1369
  if (!VEC_empty (tree, static_ctors))
1370
    {
1371
      gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
1372
      VEC_qsort (tree, static_ctors, compare_ctor);
1373
      build_cdtor (/*ctor_p=*/true, static_ctors);
1374
    }
1375
 
1376
  if (!VEC_empty (tree, static_dtors))
1377
    {
1378
      gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
1379
      VEC_qsort (tree, static_dtors, compare_dtor);
1380
      build_cdtor (/*ctor_p=*/false, static_dtors);
1381
    }
1382
}
1383
 
1384
/* Look for constructors and destructors and produce functions calling them.
   This is needed for targets not supporting ctors or dtors, but we also
   perform the transformation at link time to merge possibly numerous
   constructors/destructors into a single function to improve code locality
   and reduce size.  */
1389
 
1390
static unsigned int
1391
ipa_cdtor_merge (void)
1392
{
1393
  struct cgraph_node *node;
1394
  for (node = cgraph_nodes; node; node = node->next)
1395
    if (node->analyzed
1396
        && (DECL_STATIC_CONSTRUCTOR (node->decl)
1397
            || DECL_STATIC_DESTRUCTOR (node->decl)))
1398
       record_cdtor_fn (node);
1399
  build_cdtor_fns ();
1400
  VEC_free (tree, heap, static_ctors);
1401
  VEC_free (tree, heap, static_dtors);
1402
  return 0;
1403
}
1404
 
1405
/* Perform the pass when we have no ctors/dtors support
1406
   or at LTO time to merge multiple constructors into a single
   function.  */
1408
 
1409
static bool
1410
gate_ipa_cdtor_merge (void)
1411
{
1412
  return !targetm.have_ctors_dtors || (optimize && in_lto_p);
1413
}
1414
 
1415
struct ipa_opt_pass_d pass_ipa_cdtor_merge =
1416
{
1417
 {
1418
  IPA_PASS,
1419
  "cdtor",                              /* name */
1420
  gate_ipa_cdtor_merge,                 /* gate */
1421
  ipa_cdtor_merge,                      /* execute */
1422
  NULL,                                 /* sub */
1423
  NULL,                                 /* next */
1424
  0,                                     /* static_pass_number */
1425
  TV_CGRAPHOPT,                         /* tv_id */
1426
  0,                                     /* properties_required */
1427
  0,                                     /* properties_provided */
1428
  0,                                     /* properties_destroyed */
1429
  0,                                     /* todo_flags_start */
1430
 
1431
 },
1432
 NULL,                                  /* generate_summary */
1433
 NULL,                                  /* write_summary */
1434
 NULL,                                  /* read_summary */
1435
 NULL,                                  /* write_optimization_summary */
1436
 NULL,                                  /* read_optimization_summary */
1437
 NULL,                                  /* stmt_fixup */
1438
 0,                                      /* TODOs */
1439
 NULL,                                  /* function_transform */
1440
 NULL                                   /* variable_transform */
1441
};
