OpenCores
URL https://opencores.org/ocsvn/openrisc_me/openrisc_me/trunk

Subversion Repositories openrisc_me

[/] [openrisc/] [trunk/] [gnu-src/] [gcc-4.5.1/] [gcc/] [cgraphunit.c] - Blame information for rev 280

Details | Compare with Previous | View Log

Line No. Rev Author Line
1 280 jeremybenn
/* Callgraph based interprocedural optimizations.
2
   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
3
   Free Software Foundation, Inc.
4
   Contributed by Jan Hubicka
5
 
6
This file is part of GCC.
7
 
8
GCC is free software; you can redistribute it and/or modify it under
9
the terms of the GNU General Public License as published by the Free
10
Software Foundation; either version 3, or (at your option) any later
11
version.
12
 
13
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14
WARRANTY; without even the implied warranty of MERCHANTABILITY or
15
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
16
for more details.
17
 
18
You should have received a copy of the GNU General Public License
19
along with GCC; see the file COPYING3.  If not see
20
<http://www.gnu.org/licenses/>.  */
21
 
22
/* This module implements main driver of compilation process as well as
23
   few basic interprocedural optimizers.
24
 
25
   The main scope of this file is to act as an interface in between
26
   tree based frontends and the backend (and middle end)
27
 
28
   The front-end is supposed to use following functionality:
29
 
30
    - cgraph_finalize_function
31
 
32
      This function is called once front-end has parsed whole body of function
33
      and it is certain that the function body nor the declaration will change.
34
 
35
      (There is one exception needed for implementing GCC extern inline
36
        function.)
37
 
38
    - varpool_finalize_variable
39
 
40
      This function has same behavior as the above but is used for static
41
      variables.
42
 
43
    - cgraph_finalize_compilation_unit
44
 
45
      This function is called once (source level) compilation unit is finalized
46
      and it will no longer change.
47
 
48
      Here the call-graph construction and local function
49
      analysis takes place here.  Bodies of unreachable functions are released
50
      to conserve memory usage.
51
 
52
      The function can be called multiple times when multiple source level
53
      compilation units are combined (such as in C frontend)
54
 
55
    - cgraph_optimize
56
 
57
      In this unit-at-a-time compilation the intra procedural analysis takes
58
      place here.  In particular the static functions whose address is never
59
      taken are marked as local.  Backend can then use this information to
60
      modify calling conventions, do better inlining or similar optimizations.
61
 
62
    - cgraph_mark_needed_node
63
    - varpool_mark_needed_node
64
 
65
      When function or variable is referenced by some hidden way the call-graph
66
      data structure must be updated accordingly by this function.
67
      There should be little need to call this function and all the references
68
      should be made explicit to cgraph code.  At present these functions are
69
      used by C++ frontend to explicitly mark the keyed methods.
70
 
71
    - analyze_expr callback
72
 
73
      This function is responsible for lowering tree nodes not understood by
74
      generic code into understandable ones or alternatively marking
75
      callgraph and varpool nodes referenced by the as needed.
76
 
77
      ??? On the tree-ssa genericizing should take place here and we will avoid
78
      need for these hooks (replacing them by genericizing hook)
79
 
80
        Analyzing of all functions is deferred
81
        to cgraph_finalize_compilation_unit and expansion into cgraph_optimize.
82
 
83
        In cgraph_finalize_compilation_unit the reachable functions are
84
        analyzed.  During analysis the call-graph edges from reachable
85
        functions are constructed and their destinations are marked as
86
        reachable.  References to functions and variables are discovered too
87
        and variables found to be needed output to the assembly file.  Via
88
        mark_referenced call in assemble_variable functions referenced by
89
        static variables are noticed too.
90
 
91
        The intra-procedural information is produced and its existence
92
        indicated by global_info_ready.  Once this flag is set it is impossible
93
        to change function from !reachable to reachable and thus
94
        assemble_variable no longer call mark_referenced.
95
 
96
        Finally the call-graph is topologically sorted and all reachable functions
97
        that have not been completely inlined or are not external are output.
98
 
99
        ??? It is possible that reference to function or variable is optimized
100
        out.  We can not deal with this nicely because topological order is not
101
        suitable for it.  For tree-ssa we may consider another pass doing
102
        optimization and re-discovering reachable functions.
103
 
104
        ??? Reorganize code so variables are output very last and only if they
105
        really has been referenced by produced code, so we catch more cases
106
        where reference has been optimized out.  */
107
 
108
 
109
#include "config.h"
110
#include "system.h"
111
#include "coretypes.h"
112
#include "tm.h"
113
#include "tree.h"
114
#include "rtl.h"
115
#include "tree-flow.h"
116
#include "tree-inline.h"
117
#include "langhooks.h"
118
#include "pointer-set.h"
119
#include "toplev.h"
120
#include "flags.h"
121
#include "ggc.h"
122
#include "debug.h"
123
#include "target.h"
124
#include "cgraph.h"
125
#include "diagnostic.h"
126
#include "timevar.h"
127
#include "params.h"
128
#include "fibheap.h"
129
#include "intl.h"
130
#include "function.h"
131
#include "ipa-prop.h"
132
#include "gimple.h"
133
#include "tree-iterator.h"
134
#include "tree-pass.h"
135
#include "tree-dump.h"
136
#include "output.h"
137
#include "coverage.h"
138
#include "plugin.h"
139
 
140
/* Forward declarations of the driver routines defined later in this file.  */
static void cgraph_expand_all_functions (void);
static void cgraph_mark_functions_to_output (void);
static void cgraph_expand_function (struct cgraph_node *);
static void cgraph_output_pending_asms (void);
static void cgraph_analyze_function (struct cgraph_node *);

/* Stream for callgraph debug dumps; presumably NULL when dumping is
   disabled — opened elsewhere, not visible in this chunk.  */
static FILE *cgraph_dump_file;

/* A vector of FUNCTION_DECLs declared as static constructors.  */
static GTY (()) VEC(tree, gc) *static_ctors;
/* A vector of FUNCTION_DECLs declared as static destructors.  */
static GTY (()) VEC(tree, gc) *static_dtors;

/* Used for vtable lookup in thunk adjusting.  */
static GTY (()) tree vtable_entry_type;
155
 
156
/* When target does not have ctors and dtors, we call all constructor
157
   and destructor by special initialization/destruction function
158
   recognized by collect2.
159
 
160
   When we are going to build this function, collect all constructors and
161
   destructors and turn them into normal functions.  */
162
 
163
static void
164
record_cdtor_fn (tree fndecl)
165
{
166
  struct cgraph_node *node;
167
  if (targetm.have_ctors_dtors
168
      || (!DECL_STATIC_CONSTRUCTOR (fndecl)
169
          && !DECL_STATIC_DESTRUCTOR (fndecl)))
170
    return;
171
 
172
  if (DECL_STATIC_CONSTRUCTOR (fndecl))
173
    {
174
      VEC_safe_push (tree, gc, static_ctors, fndecl);
175
      DECL_STATIC_CONSTRUCTOR (fndecl) = 0;
176
    }
177
  if (DECL_STATIC_DESTRUCTOR (fndecl))
178
    {
179
      VEC_safe_push (tree, gc, static_dtors, fndecl);
180
      DECL_STATIC_DESTRUCTOR (fndecl) = 0;
181
    }
182
  node = cgraph_node (fndecl);
183
  node->local.disregard_inline_limits = 1;
184
  cgraph_mark_reachable_node (node);
185
}
186
 
187
/* Define global constructors/destructor functions for the CDTORS, of
   which they are LEN.  The CDTORS are sorted by initialization
   priority.  If CTOR_P is true, these are constructors; otherwise,
   they are destructors.

   Because CDTORS is sorted, each iteration of the outer loop peels off
   one maximal run of entries sharing the same priority and emits a
   single wrapper function calling all of them.  */

static void
build_cdtor (bool ctor_p, tree *cdtors, size_t len)
{
  size_t i;

  i = 0;
  while (i < len)
    {
      tree body;
      tree fn;
      priority_type priority;

      priority = 0;
      body = NULL_TREE;
      /* Find the next batch of constructors/destructors with the same
         initialization priority.  */
      do
        {
          priority_type p;
          fn = cdtors[i];
          p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
          /* BODY is still empty exactly when FN is the first function of
             the batch; its priority defines the batch's priority.  */
          if (!body)
            priority = p;
          else if (p != priority)
            break;
          append_to_statement_list (build_function_call_expr (UNKNOWN_LOCATION,
                                                              fn, 0),
                                    &body);
          ++i;
        }
      while (i < len);
      /* The inner loop always consumes at least one entry.  */
      gcc_assert (body != NULL_TREE);
      /* Generate a function to call all the function of like
         priority.  */
      cgraph_build_static_cdtor (ctor_p ? 'I' : 'D', body, priority);
    }
}
229
 
230
/* Comparison function for qsort.  P1 and P2 are actually of type
231
   "tree *" and point to static constructors.  DECL_INIT_PRIORITY is
232
   used to determine the sort order.  */
233
 
234
static int
235
compare_ctor (const void *p1, const void *p2)
236
{
237
  tree f1;
238
  tree f2;
239
  int priority1;
240
  int priority2;
241
 
242
  f1 = *(const tree *)p1;
243
  f2 = *(const tree *)p2;
244
  priority1 = DECL_INIT_PRIORITY (f1);
245
  priority2 = DECL_INIT_PRIORITY (f2);
246
 
247
  if (priority1 < priority2)
248
    return -1;
249
  else if (priority1 > priority2)
250
    return 1;
251
  else
252
    /* Ensure a stable sort.  */
253
    return (const tree *)p1 - (const tree *)p2;
254
}
255
 
256
/* Comparison function for qsort.  P1 and P2 are actually of type
257
   "tree *" and point to static destructors.  DECL_FINI_PRIORITY is
258
   used to determine the sort order.  */
259
 
260
static int
261
compare_dtor (const void *p1, const void *p2)
262
{
263
  tree f1;
264
  tree f2;
265
  int priority1;
266
  int priority2;
267
 
268
  f1 = *(const tree *)p1;
269
  f2 = *(const tree *)p2;
270
  priority1 = DECL_FINI_PRIORITY (f1);
271
  priority2 = DECL_FINI_PRIORITY (f2);
272
 
273
  if (priority1 < priority2)
274
    return -1;
275
  else if (priority1 > priority2)
276
    return 1;
277
  else
278
    /* Ensure a stable sort.  */
279
    return (const tree *)p1 - (const tree *)p2;
280
}
281
 
282
/* Generate functions to call static constructors and destructors
   for targets that do not support .ctors/.dtors sections.  These
   functions have magic names which are detected by collect2.

   The vectors were filled by record_cdtor_fn; they are sorted by
   priority, wrapped into 'I'/'D' functions by build_cdtor, and then
   emptied so the work is done at most once per vector.  */

static void
cgraph_build_cdtor_fns (void)
{
  if (!VEC_empty (tree, static_ctors))
    {
      /* record_cdtor_fn only collects when the target lacks native
         ctor/dtor support; a non-empty vector implies that here.  */
      gcc_assert (!targetm.have_ctors_dtors);
      qsort (VEC_address (tree, static_ctors),
             VEC_length (tree, static_ctors),
             sizeof (tree),
             compare_ctor);
      build_cdtor (/*ctor_p=*/true,
                   VEC_address (tree, static_ctors),
                   VEC_length (tree, static_ctors));
      VEC_truncate (tree, static_ctors, 0);
    }

  if (!VEC_empty (tree, static_dtors))
    {
      gcc_assert (!targetm.have_ctors_dtors);
      qsort (VEC_address (tree, static_dtors),
             VEC_length (tree, static_dtors),
             sizeof (tree),
             compare_dtor);
      build_cdtor (/*ctor_p=*/false,
                   VEC_address (tree, static_dtors),
                   VEC_length (tree, static_dtors));
      VEC_truncate (tree, static_dtors, 0);
    }
}
315
 
316
/* Determine if function DECL is needed.  That is, visible to something
   either outside this translation unit, something magic in the system
   configury.  Returns true when NODE must be output even if nothing in
   this unit visibly calls it.  */

bool
cgraph_decide_is_function_needed (struct cgraph_node *node, tree decl)
{
  /* If the user told us it is used, then it must be so.  */
  if (node->local.externally_visible)
    return true;

  /* ??? If the assembler name is set by hand, it is possible to assemble
     the name later after finalizing the function and the fact is noticed
     in assemble_name then.  This is arguably a bug.  */
  if (DECL_ASSEMBLER_NAME_SET_P (decl)
      && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
    return true;

  /* With -fkeep-inline-functions we are keeping all inline functions except
     for extern inline ones.  */
  if (flag_keep_inline_functions
      && DECL_DECLARED_INLINE_P (decl)
      && !DECL_EXTERNAL (decl)
      && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl)))
     return true;

  /* If we decided it was needed before, but at the time we didn't have
     the body of the function available, then it's still needed.  We have
     to go back and re-check its dependencies now.  */
  if (node->needed)
    return true;

  /* Externally visible functions must be output.  The exception is
     COMDAT functions that must be output only when they are needed.

     When not optimizing, also output the static functions. (see
     PR24561), but don't do so for always_inline functions, functions
     declared inline and nested functions.  These were optimized out
     in the original implementation and it is unclear whether we want
     to change the behavior here.  */
  if (((TREE_PUBLIC (decl)
        || (!optimize && !node->local.disregard_inline_limits
            && !DECL_DECLARED_INLINE_P (decl)
            && !node->origin))
       && !flag_whole_program
       && !flag_lto
       && !flag_whopr)
      && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
    return true;

  /* Constructors and destructors are reachable from the runtime by
     some mechanism.  */
  if (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl))
    return true;

  return false;
}
373
 
374
/* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
   functions into callgraph in a way so they look like ordinary reachable
   functions inserted into callgraph already at construction time.

   How much work each new function needs depends on how far the
   compilation pipeline (cgraph_state) has already progressed.  Returns
   true iff at least one function was finalized while the callgraph was
   still under construction.  */

bool
cgraph_process_new_functions (void)
{
  bool output = false;
  tree fndecl;
  struct cgraph_node *node;

  /*  Note that this queue may grow as its being processed, as the new
      functions may generate new ones.  */
  while (cgraph_new_nodes)
    {
      node = cgraph_new_nodes;
      fndecl = node->decl;
      /* Pop NODE off the queue before processing; processing may push
         further nodes.  */
      cgraph_new_nodes = cgraph_new_nodes->next_needed;
      switch (cgraph_state)
        {
        case CGRAPH_STATE_CONSTRUCTION:
          /* At construction time we just need to finalize function and move
             it into reachable functions list.  */

          node->next_needed = NULL;
          cgraph_finalize_function (fndecl, false);
          cgraph_mark_reachable_node (node);
          output = true;
          break;

        case CGRAPH_STATE_IPA:
        case CGRAPH_STATE_IPA_SSA:
          /* When IPA optimization already started, do all essential
             transformations that has been already performed on the whole
             cgraph but not on this function.  */

          gimple_register_cfg_hooks ();
          if (!node->analyzed)
            cgraph_analyze_function (node);
          push_cfun (DECL_STRUCT_FUNCTION (fndecl));
          current_function_decl = fndecl;
          compute_inline_parameters (node);
          if ((cgraph_state == CGRAPH_STATE_IPA_SSA
              && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
              /* When not optimizing, be sure we run early local passes anyway
                 to expand OMP.  */
              || !optimize)
            execute_pass_list (pass_early_local_passes.pass.sub);
          /* Dominance info computed by the passes above is local to this
             function; discard it before switching functions.  */
          free_dominance_info (CDI_POST_DOMINATORS);
          free_dominance_info (CDI_DOMINATORS);
          pop_cfun ();
          current_function_decl = NULL;
          break;

        case CGRAPH_STATE_EXPANSION:
          /* Functions created during expansion shall be compiled
             directly.  */
          node->process = 0;
          cgraph_expand_function (node);
          break;

        default:
          gcc_unreachable ();
          break;
        }
      cgraph_call_function_insertion_hooks (node);
    }
  return output;
}
443
 
444
/* As a GCC extension we allow redefinition of the function.  The
   semantics when both copies of bodies differ is not well defined.
   We replace the old body with new body so in unit at a time mode
   we always use new body, while in normal mode we may end up with
   old body inlined into some functions and new body expanded and
   inlined in others.

   ??? It may make more sense to use one body for inlining and other
   body for expanding the function but this is difficult to do.

   Discard everything already known about NODE so the redefining body
   can be analyzed from scratch.  */

static void
cgraph_reset_node (struct cgraph_node *node)
{
  /* If node->process is set, then we have already begun whole-unit analysis.
     This is *not* testing for whether we've already emitted the function.
     That case can be sort-of legitimately seen with real function redefinition
     errors.  I would argue that the front end should never present us with
     such a case, but don't enforce that for now.  */
  gcc_assert (!node->process);

  /* Reset our data structures so we can analyze the function again.  */
  memset (&node->local, 0, sizeof (node->local));
  memset (&node->global, 0, sizeof (node->global));
  memset (&node->rtl, 0, sizeof (node->rtl));
  node->analyzed = false;
  node->local.redefined_extern_inline = true;
  node->local.finalized = false;

  cgraph_node_remove_callees (node);

  /* We may need to re-queue the node for assembling in case
     we already processed it and ignored it as not needed or got
     a re-declaration in IMA mode.  */
  if (node->reachable)
    {
      struct cgraph_node *n;

      /* Only clear `reachable' when NODE is not sitting on the
         assembler queue; a queued node is still reachable.  */
      for (n = cgraph_nodes_queue; n; n = n->next_needed)
        if (n == node)
          break;
      if (!n)
        node->reachable = 0;
    }
}
488
 
489
static void
490
cgraph_lower_function (struct cgraph_node *node)
491
{
492
  if (node->lowered)
493
    return;
494
 
495
  if (node->nested)
496
    lower_nested_functions (node->decl);
497
  gcc_assert (!node->nested);
498
 
499
  tree_lowering_passes (node->decl);
500
  node->lowered = true;
501
}
502
 
503
/* DECL has been parsed.  Take it, queue it, compile it at the whim of the
   logic in effect.  If NESTED is true, then our caller cannot stand to have
   the garbage collector run at the moment.  We would need to either create
   a new GC context, or just not compile right now.  */

void
cgraph_finalize_function (tree decl, bool nested)
{
  struct cgraph_node *node = cgraph_node (decl);

  /* Redefinition of an already-finalized function (GCC extern inline
     extension): throw away the previous analysis first.  */
  if (node->local.finalized)
    cgraph_reset_node (node);

  node->pid = cgraph_max_pid ++;
  notice_global_symbol (decl);
  node->local.finalized = true;
  /* A front end that already built a CFG has effectively lowered the
     function itself.  */
  node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
  node->finalized_by_frontend = true;
  record_cdtor_fn (node->decl);

  if (cgraph_decide_is_function_needed (node, decl))
    cgraph_mark_needed_node (node);

  /* Since we reclaim unreachable nodes at the end of every language
     level unit, we need to be conservative about possible entry points
     there.  */
  if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl)))
    cgraph_mark_reachable_node (node);

  /* If we've not yet emitted decl, tell the debug info about it.  */
  if (!TREE_ASM_WRITTEN (decl))
    (*debug_hooks->deferred_inline_function) (decl);

  /* Possibly warn about unused parameters.  */
  if (warn_unused_parameter)
    do_warn_unused_parameter (decl);

  if (!nested)
    ggc_collect ();
}
543
 
544
/* C99 extern inline keywords allow changing of declaration after function
545
   has been finalized.  We need to re-decide if we want to mark the function as
546
   needed then.   */
547
 
548
void
549
cgraph_mark_if_needed (tree decl)
550
{
551
  struct cgraph_node *node = cgraph_node (decl);
552
  if (node->local.finalized && cgraph_decide_is_function_needed (node, decl))
553
    cgraph_mark_needed_node (node);
554
}
555
 
556
/* Return TRUE if NODE2 is equivalent to NODE or its clone.  */
557
static bool
558
clone_of_p (struct cgraph_node *node, struct cgraph_node *node2)
559
{
560
  while (node != node2 && node2)
561
    node2 = node2->clone_of;
562
  return node2 != NULL;
563
}
564
 
565
/* Verify cgraph nodes of given cgraph node.  Checks edge/aux hygiene,
   profile counts, inline-clone invariants, the clone tree, comdat group
   lists and (when a body is available) that callgraph edges match the
   call statements actually present in the CFG.  Aborts via
   internal_error on the first node with any failure.  */
void
verify_cgraph_node (struct cgraph_node *node)
{
  struct cgraph_edge *e;
  struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
  struct function *saved_cfun = cfun;
  basic_block this_block;
  gimple_stmt_iterator gsi;
  bool error_found = false;

  /* Do not pile verifier ICEs on top of user errors.  */
  if (errorcount || sorrycount)
    return;

  timevar_push (TV_CGRAPH_VERIFY);
  /* debug_generic_stmt needs correct cfun */
  set_cfun (this_cfun);
  /* The aux field is scratch space used below to pair edges with call
     statements; it must start out clear.  */
  for (e = node->callees; e; e = e->next_callee)
    if (e->aux)
      {
        error ("aux field set for edge %s->%s",
               identifier_to_locale (cgraph_node_name (e->caller)),
               identifier_to_locale (cgraph_node_name (e->callee)));
        error_found = true;
      }
  if (node->count < 0)
    {
      error ("Execution count is negative");
      error_found = true;
    }
  /* A node inlined into another must not be independently visible,
     addressable or needed.  */
  if (node->global.inlined_to && node->local.externally_visible)
    {
      error ("Externally visible inline clone");
      error_found = true;
    }
  if (node->global.inlined_to && node->address_taken)
    {
      error ("Inline clone with address taken");
      error_found = true;
    }
  if (node->global.inlined_to && node->needed)
    {
      error ("Inline clone is needed");
      error_found = true;
    }
  /* Check profile data and inline bookkeeping on every incoming edge.  */
  for (e = node->callers; e; e = e->next_caller)
    {
      if (e->count < 0)
        {
          error ("caller edge count is negative");
          error_found = true;
        }
      if (e->frequency < 0)
        {
          error ("caller edge frequency is negative");
          error_found = true;
        }
      if (e->frequency > CGRAPH_FREQ_MAX)
        {
          error ("caller edge frequency is too large");
          error_found = true;
        }
      if (gimple_has_body_p (e->caller->decl)
          && !e->caller->global.inlined_to
          && (e->frequency
              != compute_call_stmt_bb_frequency (e->caller->decl,
                                                 gimple_bb (e->call_stmt))))
        {
          error ("caller edge frequency %i does not match BB freqency %i",
                 e->frequency,
                 compute_call_stmt_bb_frequency (e->caller->decl,
                                                 gimple_bb (e->call_stmt)));
          error_found = true;
        }
      if (!e->inline_failed)
        {
          /* An inlined edge: the clone must record the outermost caller
             it was ultimately inlined into, and must have exactly one
             caller.  */
          if (node->global.inlined_to
              != (e->caller->global.inlined_to
                  ? e->caller->global.inlined_to : e->caller))
            {
              error ("inlined_to pointer is wrong");
              error_found = true;
            }
          if (node->callers->next_caller)
            {
              error ("multiple inline callers");
              error_found = true;
            }
        }
      else
        if (node->global.inlined_to)
          {
            error ("inlined_to pointer set for noninline callers");
            error_found = true;
          }
    }
  if (!node->callers && node->global.inlined_to)
    {
      error ("inlined_to pointer is set but no predecessors found");
      error_found = true;
    }
  if (node->global.inlined_to == node)
    {
      error ("inlined_to pointer refers to itself");
      error_found = true;
    }

  if (!cgraph_node (node->decl))
    {
      error ("node not found in cgraph_hash");
      error_found = true;
    }

  /* Verify both directions of the clone tree linkage.  */
  if (node->clone_of)
    {
      struct cgraph_node *n;
      for (n = node->clone_of->clones; n; n = n->next_sibling_clone)
        if (n == node)
          break;
      if (!n)
        {
          error ("node has wrong clone_of");
          error_found = true;
        }
    }
  if (node->clones)
    {
      struct cgraph_node *n;
      for (n = node->clones; n; n = n->next_sibling_clone)
        if (n->clone_of != node)
          break;
      if (n)
        {
          error ("node has wrong clone list");
          error_found = true;
        }
    }
  if ((node->prev_sibling_clone || node->next_sibling_clone) && !node->clone_of)
    {
       error ("node is in clone list but it is not clone");
       error_found = true;
    }
  if (!node->prev_sibling_clone && node->clone_of && node->clone_of->clones != node)
    {
      error ("node has wrong prev_clone pointer");
      error_found = true;
    }
  if (node->prev_sibling_clone && node->prev_sibling_clone->next_sibling_clone != node)
    {
      error ("double linked list of clones corrupted");
      error_found = true;
    }
  /* same_comdat_group must be a circular list of distinct ONE_ONLY
     nodes.  */
  if (node->same_comdat_group)
    {
      struct cgraph_node *n = node->same_comdat_group;

      if (!DECL_ONE_ONLY (node->decl))
        {
          error ("non-DECL_ONE_ONLY node in a same_comdat_group list");
          error_found = true;
        }
      if (n == node)
        {
          error ("node is alone in a comdat group");
          error_found = true;
        }
      do
        {
          if (!n->same_comdat_group)
            {
              error ("same_comdat_group is not a circular list");
              error_found = true;
              break;
            }
          n = n->same_comdat_group;
        }
      while (n != node);
    }

  /* When a body is present, cross-check every call statement in the CFG
     against the callee edge list using the edges' aux fields.  */
  if (node->analyzed && gimple_has_body_p (node->decl)
      && !TREE_ASM_WRITTEN (node->decl)
      && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to)
      && !flag_wpa)
    {
      if (this_cfun->cfg)
        {
          /* The nodes we're interested in are never shared, so walk
             the tree ignoring duplicates.  */
          struct pointer_set_t *visited_nodes = pointer_set_create ();
          /* Reach the trees by walking over the CFG, and note the
             enclosing basic-blocks in the call edges.  */
          FOR_EACH_BB_FN (this_block, this_cfun)
            for (gsi = gsi_start_bb (this_block);
                 !gsi_end_p (gsi);
                 gsi_next (&gsi))
              {
                gimple stmt = gsi_stmt (gsi);
                tree decl;
                if (is_gimple_call (stmt) && (decl = gimple_call_fndecl (stmt)))
                  {
                    struct cgraph_edge *e = cgraph_edge (node, stmt);
                    if (e)
                      {
                        if (e->aux)
                          {
                            error ("shared call_stmt:");
                            debug_gimple_stmt (stmt);
                            error_found = true;
                          }
                        if (e->callee->same_body_alias)
                          {
                            error ("edge points to same body alias:");
                            debug_tree (e->callee->decl);
                            error_found = true;
                          }
                        else if (!node->global.inlined_to
                                 && !e->callee->global.inlined_to
                                 && !clone_of_p (cgraph_node (decl), e->callee))
                          {
                            error ("edge points to wrong declaration:");
                            debug_tree (e->callee->decl);
                            fprintf (stderr," Instead of:");
                            debug_tree (decl);
                            error_found = true;
                          }
                        /* Mark the edge as matched to a statement.  */
                        e->aux = (void *)1;
                      }
                    else
                      {
                        error ("missing callgraph edge for call stmt:");
                        debug_gimple_stmt (stmt);
                        error_found = true;
                      }
                  }
              }
          pointer_set_destroy (visited_nodes);
        }
      else
        /* No CFG available?!  */
        gcc_unreachable ();

      /* Any edge left unmarked has no call statement backing it; also
         clear the scratch aux fields again.  */
      for (e = node->callees; e; e = e->next_callee)
        {
          if (!e->aux && !e->indirect_call)
            {
              error ("edge %s->%s has no corresponding call_stmt",
                     identifier_to_locale (cgraph_node_name (e->caller)),
                     identifier_to_locale (cgraph_node_name (e->callee)));
              debug_gimple_stmt (e->call_stmt);
              error_found = true;
            }
          e->aux = 0;
        }
    }
  if (error_found)
    {
      dump_cgraph_node (stderr, node);
      internal_error ("verify_cgraph_node failed");
    }
  set_cfun (saved_cfun);
  timevar_pop (TV_CGRAPH_VERIFY);
}
827
 
828
/* Verify whole cgraph structure.  */
829
void
830
verify_cgraph (void)
831
{
832
  struct cgraph_node *node;
833
 
834
  if (sorrycount || errorcount)
835
    return;
836
 
837
  for (node = cgraph_nodes; node; node = node->next)
838
    verify_cgraph_node (node);
839
}
840
 
841
/* Output all asm statements we have stored up to be output.  */
842
 
843
static void
844
cgraph_output_pending_asms (void)
845
{
846
  struct cgraph_asm_node *can;
847
 
848
  if (errorcount || sorrycount)
849
    return;
850
 
851
  for (can = cgraph_asm_nodes; can; can = can->next)
852
    assemble_asm (can->asm_str);
853
  cgraph_asm_nodes = NULL;
854
}
855
 
856
/* Analyze the function scheduled to be output.  Gimplifies and lowers
   NODE's body (once) and marks the node analyzed, temporarily switching
   current_function_decl/cfun to NODE's function.  */
static void
cgraph_analyze_function (struct cgraph_node *node)
{
  tree save = current_function_decl;
  tree decl = node->decl;

  current_function_decl = decl;
  push_cfun (DECL_STRUCT_FUNCTION (decl));

  /* NOTE: the triple-e spelling is the real identifier in this GCC
     version, not a transcription error.  */
  assign_assembler_name_if_neeeded (node->decl);

  /* Make sure to gimplify bodies only once.  During analyzing a
     function we lower it, which will require gimplified nested
     functions, so we can end up here with an already gimplified
     body.  */
  if (!gimple_body (decl))
    gimplify_function_tree (decl);
  dump_function (TDI_generic, decl);

  cgraph_lower_function (node);
  node->analyzed = true;

  /* Restore the caller's function context.  */
  pop_cfun ();
  current_function_decl = save;
}
882
 
883
/* Look for externally_visible and used attributes and mark cgraph nodes
   accordingly.

   We cannot mark the nodes at the point the attributes are processed (in
   handle_*_attribute) because the copy of the declarations available at that
   point may not be canonical.  For example, in:

    void f();
    void f() __attribute__((used));

   the declaration we see in handle_used_attribute will be the second
   declaration -- but the front end will subsequently merge that declaration
   with the original declaration and discard the second declaration.

   Furthermore, we can't mark these nodes in cgraph_finalize_function because:

    void f() {}
    void f() __attribute__((externally_visible));

   is valid.

   So, we walk the nodes at the end of the translation unit, applying the
   attributes at that point.

   FIRST and FIRST_VAR bound the walk: only cgraph nodes before FIRST and
   varpool nodes before FIRST_VAR in list order (i.e. those added since the
   previous invocation) are processed, so this may be called repeatedly.  */

static void
process_function_and_variable_attributes (struct cgraph_node *first,
                                          struct varpool_node *first_var)
{
  struct cgraph_node *node;
  struct varpool_node *vnode;

  /* Functions: honour "used" and "externally_visible".  */
  for (node = cgraph_nodes; node != first; node = node->next)
    {
      tree decl = node->decl;
      /* "used" attribute: the symbol must be kept even if it looks dead.  */
      if (DECL_PRESERVE_P (decl))
        {
          mark_decl_referenced (decl);
          if (node->local.finalized)
             cgraph_mark_needed_node (node);
        }
      if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
        {
          /* The attribute is meaningless on non-public symbols; warn.  */
          if (! TREE_PUBLIC (node->decl))
            warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
                        "%<externally_visible%>"
                        " attribute have effect only on public objects");
          else if (node->local.finalized)
             cgraph_mark_needed_node (node);
        }
    }
  /* Variables: same handling, plus force_output for "used".  */
  for (vnode = varpool_nodes; vnode != first_var; vnode = vnode->next)
    {
      tree decl = vnode->decl;
      if (DECL_PRESERVE_P (decl))
        {
          mark_decl_referenced (decl);
          vnode->force_output = true;
          if (vnode->finalized)
            varpool_mark_needed_node (vnode);
        }
      if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
        {
          if (! TREE_PUBLIC (vnode->decl))
            warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
                        "%<externally_visible%>"
                        " attribute have effect only on public objects");
          else if (vnode->finalized)
            varpool_mark_needed_node (vnode);
        }
    }
}
954
 
955
/* Process CGRAPH_NODES_NEEDED queue, analyze each function (and transitively
   each reachable functions) and build cgraph.
   The function can be called multiple times after inserting new nodes
   into beginning of queue.  Just the new part of queue is re-scanned then.  */

static void
cgraph_analyze_functions (void)
{
  /* Keep track of already processed nodes when called multiple times for
     intermodule optimization.  */
  static struct cgraph_node *first_analyzed;
  struct cgraph_node *first_processed = first_analyzed;
  static struct varpool_node *first_analyzed_var;
  struct cgraph_node *node, *next;

  /* Apply used/externally_visible attributes to everything added since
     the previous invocation, then analyze pending variables.  */
  process_function_and_variable_attributes (first_processed,
                                            first_analyzed_var);
  first_processed = cgraph_nodes;
  first_analyzed_var = varpool_nodes;
  varpool_analyze_pending_decls ();
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Initial entry points:");
      for (node = cgraph_nodes; node != first_analyzed; node = node->next)
        if (node->needed)
          fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
      fprintf (cgraph_dump_file, "\n");
    }
  cgraph_process_new_functions ();

  /* Propagate reachability flag and lower representation of all reachable
     functions.  In the future, lowering will introduce new functions and
     new entry points on the way (by template instantiation and virtual
     method table generation for instance).  */
  while (cgraph_nodes_queue)
    {
      struct cgraph_edge *edge;
      tree decl = cgraph_nodes_queue->decl;

      /* Pop the head of the worklist.  */
      node = cgraph_nodes_queue;
      cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
      node->next_needed = NULL;

      /* ??? It is possible to create extern inline function and later using
         weak alias attribute to kill its body. See
         gcc.c-torture/compile/20011119-1.c  */
      if (!DECL_STRUCT_FUNCTION (decl))
        {
          cgraph_reset_node (node);
          continue;
        }

      if (!node->analyzed)
        cgraph_analyze_function (node);

      /* Everything this function calls becomes reachable.  */
      for (edge = node->callees; edge; edge = edge->next_callee)
        if (!edge->callee->reachable)
          cgraph_mark_reachable_node (edge->callee);

      /* Members of one comdat group are kept or dropped together.  */
      if (node->same_comdat_group)
        {
          for (next = node->same_comdat_group;
               next != node;
               next = next->same_comdat_group)
            cgraph_mark_reachable_node (next);
        }

      /* If decl is a clone of an abstract function, mark that abstract
         function so that we don't release its body. The DECL_INITIAL() of that
         abstract function declaration will be later needed to output debug info.  */
      if (DECL_ABSTRACT_ORIGIN (decl))
        {
          struct cgraph_node *origin_node = cgraph_node (DECL_ABSTRACT_ORIGIN (decl));
          origin_node->abstract_and_needed = true;
        }

      /* We finalize local static variables during constructing callgraph
         edges.  Process their attributes too.  */
      process_function_and_variable_attributes (first_processed,
                                                first_analyzed_var);
      first_processed = cgraph_nodes;
      first_analyzed_var = varpool_nodes;
      varpool_analyze_pending_decls ();
      cgraph_process_new_functions ();
    }

  /* Collect entry points to the unit.  */
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Unit entry points:");
      for (node = cgraph_nodes; node != first_analyzed; node = node->next)
        if (node->needed)
          fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
      fprintf (cgraph_dump_file, "\n\nInitial ");
      dump_cgraph (cgraph_dump_file);
    }

  if (cgraph_dump_file)
    fprintf (cgraph_dump_file, "\nReclaiming functions:");

  /* Remove bodies (and whole nodes) of functions that turned out to be
     unreachable.  NEXT is cached because cgraph_remove_node frees NODE.  */
  for (node = cgraph_nodes; node != first_analyzed; node = next)
    {
      tree decl = node->decl;
      next = node->next;

      if (node->local.finalized && !gimple_has_body_p (decl))
        cgraph_reset_node (node);

      if (!node->reachable && gimple_has_body_p (decl))
        {
          if (cgraph_dump_file)
            fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
          cgraph_remove_node (node);
          continue;
        }
      else
        node->next_needed = NULL;
      gcc_assert (!node->local.finalized || gimple_has_body_p (decl));
      gcc_assert (node->analyzed == node->local.finalized);
    }
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "\n\nReclaimed ");
      dump_cgraph (cgraph_dump_file);
    }
  /* Remember how far we got, so the next call rescans only new nodes.  */
  first_analyzed = cgraph_nodes;
  ggc_collect ();
}
1083
 
1084
 
1085
/* Analyze the whole compilation unit once it is parsed completely.
   This is the front ends' entry point into the middle end: it builds the
   callgraph, lowers all reachable functions, and hands control to the
   pass manager via cgraph_optimize.  */

void
cgraph_finalize_compilation_unit (void)
{
  timevar_push (TV_CGRAPH);

  /* Do not skip analyzing the functions if there were errors, we
     miss diagnostics for following functions otherwise.  */

  /* Emit size functions we didn't inline.  */
  finalize_size_functions ();

  /* Call functions declared with the "constructor" or "destructor"
     attribute.  */
  cgraph_build_cdtor_fns ();

  /* Mark alias targets necessary and emit diagnostics.  */
  finish_aliases_1 ();

  if (!quiet_flag)
    {
      fprintf (stderr, "\nAnalyzing compilation unit\n");
      fflush (stderr);
    }

  /* Gimplify and lower all functions, compute reachability and
     remove unreachable nodes.  */
  cgraph_analyze_functions ();

  /* Mark alias targets necessary and emit diagnostics.  */
  finish_aliases_1 ();

  /* Gimplify and lower thunks.  (Second pass: analysis above may have
     introduced new aliases/thunks.)  */
  cgraph_analyze_functions ();

  /* Finally drive the pass manager.  */
  cgraph_optimize ();

  timevar_pop (TV_CGRAPH);
}
1126
 
1127
 
1128
/* Figure out what functions we want to assemble, setting node->process
   on each of them.  A function is output when it is needed, or when it
   is reachable and has at least one caller that did not inline it; all
   members of its comdat group are then marked as well.  With
   ENABLE_CHECKING we verify that every unneeded function was already
   reclaimed by cgraph_analyze_functions.  */

static void
cgraph_mark_functions_to_output (void)
{
  struct cgraph_node *node;
#ifdef ENABLE_CHECKING
  bool check_same_comdat_groups = false;

  for (node = cgraph_nodes; node; node = node->next)
    gcc_assert (!node->process);
#endif

  for (node = cgraph_nodes; node; node = node->next)
    {
      tree decl = node->decl;
      struct cgraph_edge *e;

      /* Only comdat-group propagation (below) may have set process.  */
      gcc_assert (!node->process || node->same_comdat_group);
      if (node->process)
        continue;

      /* Look for at least one caller that did not inline this call.  */
      for (e = node->callers; e; e = e->next_caller)
        if (e->inline_failed)
          break;

      /* We need to output all local functions that are used and not
         always inlined, as well as those that are reachable from
         outside the current compilation unit.  */
      if (node->analyzed
          && !node->global.inlined_to
          && (node->needed
              || (e && node->reachable))
          && !TREE_ASM_WRITTEN (decl)
          && !DECL_EXTERNAL (decl))
        {
          node->process = 1;
          /* The whole comdat group is emitted together.  */
          if (node->same_comdat_group)
            {
              struct cgraph_node *next;
              for (next = node->same_comdat_group;
                   next != node;
                   next = next->same_comdat_group)
                next->process = 1;
            }
        }
      else if (node->same_comdat_group)
        {
#ifdef ENABLE_CHECKING
          /* Verified after the main loop, once all marks are final.  */
          check_same_comdat_groups = true;
#endif
        }
      else
        {
          /* We should've reclaimed all functions that are not needed.  */
#ifdef ENABLE_CHECKING
          if (!node->global.inlined_to
              && gimple_has_body_p (decl)
              && !DECL_EXTERNAL (decl))
            {
              dump_cgraph_node (stderr, node);
              internal_error ("failed to reclaim unneeded function");
            }
#endif
          gcc_assert (node->global.inlined_to
                      || !gimple_has_body_p (decl)
                      || DECL_EXTERNAL (decl));

        }

    }
#ifdef ENABLE_CHECKING
  /* Unmarked members of comdat groups must have no body left either.  */
  if (check_same_comdat_groups)
    for (node = cgraph_nodes; node; node = node->next)
      if (node->same_comdat_group && !node->process)
        {
          tree decl = node->decl;
          if (!node->global.inlined_to
              && gimple_has_body_p (decl)
              && !DECL_EXTERNAL (decl))
            {
              dump_cgraph_node (stderr, node);
              internal_error ("failed to reclaim unneeded function");
            }
        }
#endif
}
1215
 
1216
/* DECL is FUNCTION_DECL.  Initialize datastructures so DECL is a function
1217
   in lowered gimple form.
1218
 
1219
   Set current_function_decl and cfun to newly constructed empty function body.
1220
   return basic block in the function body.  */
1221
 
1222
static basic_block
1223
init_lowered_empty_function (tree decl)
1224
{
1225
  basic_block bb;
1226
 
1227
  current_function_decl = decl;
1228
  allocate_struct_function (decl, false);
1229
  gimple_register_cfg_hooks ();
1230
  init_empty_tree_cfg ();
1231
  init_tree_ssa (cfun);
1232
  init_ssa_operands ();
1233
  cfun->gimple_df->in_ssa_p = true;
1234
  DECL_INITIAL (decl) = make_node (BLOCK);
1235
 
1236
  DECL_SAVED_TREE (decl) = error_mark_node;
1237
  cfun->curr_properties |=
1238
    (PROP_gimple_lcf | PROP_gimple_leh | PROP_cfg | PROP_referenced_vars |
1239
     PROP_ssa);
1240
 
1241
  /* Create BB for body of the function and connect it properly.  */
1242
  bb = create_basic_block (NULL, (void *) 0, ENTRY_BLOCK_PTR);
1243
  make_edge (ENTRY_BLOCK_PTR, bb, 0);
1244
  make_edge (bb, EXIT_BLOCK_PTR, 0);
1245
 
1246
  return bb;
1247
}
1248
 
1249
/* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
   offset indicated by VIRTUAL_OFFSET, if that is
   non-null. THIS_ADJUSTING is nonzero for a this adjusting thunk and
   zero for a result adjusting thunk.

   The adjustment statements are emitted after the iterator *BSI, and
   the adjusted value is returned as a fresh temporary.  */

static tree
thunk_adjust (gimple_stmt_iterator * bsi,
              tree ptr, bool this_adjusting,
              HOST_WIDE_INT fixed_offset, tree virtual_offset)
{
  gimple stmt;
  tree ret;

  /* For a this-adjusting thunk, the fixed offset is applied before the
     virtual lookup (the vcall offset is relative to the adjusted this).  */
  if (this_adjusting
      && fixed_offset != 0)
    {
      stmt = gimple_build_assign (ptr,
                                  fold_build2_loc (input_location,
                                                   POINTER_PLUS_EXPR,
                                                   TREE_TYPE (ptr), ptr,
                                                   size_int (fixed_offset)));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
    }

  /* If there's a virtual offset, look up that value in the vtable and
     adjust the pointer again.  */
  if (virtual_offset)
    {
      tree vtabletmp;
      tree vtabletmp2;
      tree vtabletmp3;
      tree offsettmp;

      /* Lazily build the type of a vtable entry (pointer to function).  */
      if (!vtable_entry_type)
        {
          tree vfunc_type = make_node (FUNCTION_TYPE);
          TREE_TYPE (vfunc_type) = integer_type_node;
          TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
          layout_type (vfunc_type);

          vtable_entry_type = build_pointer_type (vfunc_type);
        }

      vtabletmp =
        create_tmp_var (build_pointer_type
                        (build_pointer_type (vtable_entry_type)), "vptr");

      /* The vptr is always at offset zero in the object.  */
      stmt = gimple_build_assign (vtabletmp,
                                  build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
                                          ptr));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (stmt);
      find_referenced_vars_in (stmt);

      /* Form the vtable address.  */
      vtabletmp2 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp)),
                                   "vtableaddr");
      stmt = gimple_build_assign (vtabletmp2,
                                  build1 (INDIRECT_REF,
                                          TREE_TYPE (vtabletmp2), vtabletmp));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (stmt);
      find_referenced_vars_in (stmt);

      /* Find the entry with the vcall offset.  */
      stmt = gimple_build_assign (vtabletmp2,
                                  fold_build2_loc (input_location,
                                                   POINTER_PLUS_EXPR,
                                                   TREE_TYPE (vtabletmp2),
                                                   vtabletmp2,
                                                   fold_convert (sizetype,
                                                                 virtual_offset)));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Get the offset itself.  */
      vtabletmp3 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp2)),
                                   "vcalloffset");
      stmt = gimple_build_assign (vtabletmp3,
                                  build1 (INDIRECT_REF,
                                          TREE_TYPE (vtabletmp3),
                                          vtabletmp2));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (stmt);
      find_referenced_vars_in (stmt);

      /* Cast to sizetype.  */
      offsettmp = create_tmp_var (sizetype, "offset");
      stmt = gimple_build_assign (offsettmp, fold_convert (sizetype, vtabletmp3));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (stmt);
      find_referenced_vars_in (stmt);

      /* Adjust the `this' pointer.  */
      ptr = fold_build2_loc (input_location,
                             POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr,
                             offsettmp);
    }

  /* For a result-adjusting thunk, the fixed offset is applied after the
     virtual adjustment.  */
  if (!this_adjusting
      && fixed_offset != 0)
    /* Adjust the pointer by the constant.  */
    {
      tree ptrtmp;

      if (TREE_CODE (ptr) == VAR_DECL)
        ptrtmp = ptr;
      else
        {
          ptrtmp = create_tmp_var (TREE_TYPE (ptr), "ptr");
          stmt = gimple_build_assign (ptrtmp, ptr);
          gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
          mark_symbols_for_renaming (stmt);
          find_referenced_vars_in (stmt);
        }
      ptr = fold_build2_loc (input_location,
                             POINTER_PLUS_EXPR, TREE_TYPE (ptrtmp), ptrtmp,
                             size_int (fixed_offset));
    }

  /* Emit the statement and gimplify the adjustment expression.  */
  ret = create_tmp_var (TREE_TYPE (ptr), "adjusted_this");
  stmt = gimple_build_assign (ret, ptr);
  mark_symbols_for_renaming (stmt);
  find_referenced_vars_in (stmt);
  gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

  return ret;
}
1378
 
1379
/* Produce assembler for thunk NODE.

   Two strategies: when the target can emit a this-adjusting thunk
   directly (targetm.asm_out.can_output_mi_thunk), do so.  Otherwise
   build a small gimple body that adjusts the pointer via thunk_adjust,
   calls the thunked-to function, and hands the new function to the
   cgraph for ordinary compilation.  */

static void
assemble_thunk (struct cgraph_node *node)
{
  bool this_adjusting = node->thunk.this_adjusting;
  HOST_WIDE_INT fixed_offset = node->thunk.fixed_offset;
  HOST_WIDE_INT virtual_value = node->thunk.virtual_value;
  tree virtual_offset = NULL;
  tree alias = node->thunk.alias;
  tree thunk_fndecl = node->decl;
  tree a = DECL_ARGUMENTS (thunk_fndecl);

  current_function_decl = thunk_fndecl;

  /* Fast path: the target emits the thunk as raw assembly.  */
  if (this_adjusting
      && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
                                              virtual_value, alias))
    {
      const char *fnname;
      tree fn_block;

      DECL_RESULT (thunk_fndecl)
        = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
                      RESULT_DECL, 0, integer_type_node);
      fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));

      /* The back end expects DECL_INITIAL to contain a BLOCK, so we
         create one.  */
      fn_block = make_node (BLOCK);
      BLOCK_VARS (fn_block) = a;
      DECL_INITIAL (thunk_fndecl) = fn_block;
      init_function_start (thunk_fndecl);
      cfun->is_thunk = 1;
      assemble_start_function (thunk_fndecl, fnname);

      targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
                                       fixed_offset, virtual_value, alias);

      assemble_end_function (thunk_fndecl, fnname);
      init_insn_lengths ();
      free_after_compilation (cfun);
      set_cfun (NULL);
      TREE_ASM_WRITTEN (thunk_fndecl) = 1;
    }
  else
    {
      /* Slow path: synthesize a gimple body for the thunk.  */
      tree restype;
      basic_block bb, then_bb, else_bb, return_bb;
      gimple_stmt_iterator bsi;
      int nargs = 0;
      tree arg;
      int i;
      tree resdecl;
      tree restmp = NULL;
      VEC(tree, heap) *vargs;

      gimple call;
      gimple ret;

      DECL_IGNORED_P (thunk_fndecl) = 1;
      bitmap_obstack_initialize (NULL);

      if (node->thunk.virtual_offset_p)
        virtual_offset = size_int (virtual_value);

      /* Build the return declaration for the function.  */
      restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
      if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
        {
          resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
          DECL_ARTIFICIAL (resdecl) = 1;
          DECL_IGNORED_P (resdecl) = 1;
          DECL_RESULT (thunk_fndecl) = resdecl;
        }
      else
        resdecl = DECL_RESULT (thunk_fndecl);

      bb = then_bb = else_bb = return_bb = init_lowered_empty_function (thunk_fndecl);

      bsi = gsi_start_bb (bb);

      /* Build call to the function being thunked.  */
      if (!VOID_TYPE_P (restype))
        {
          /* Aggregate results go straight into the result decl; scalar
             results go through a temporary.  */
          if (!is_gimple_reg_type (restype))
            {
              restmp = resdecl;
              cfun->local_decls = tree_cons (NULL_TREE, restmp, cfun->local_decls);
              BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
            }
          else
            restmp = create_tmp_var_raw (restype, "retval");
        }

      for (arg = a; arg; arg = TREE_CHAIN (arg))
        nargs++;
      vargs = VEC_alloc (tree, heap, nargs);
      /* The first argument (`this') may need adjusting before the call.  */
      if (this_adjusting)
        VEC_quick_push (tree, vargs,
                        thunk_adjust (&bsi,
                                      a, 1, fixed_offset,
                                      virtual_offset));
      else
        VEC_quick_push (tree, vargs, a);
      for (i = 1, arg = TREE_CHAIN (a); i < nargs; i++, arg = TREE_CHAIN (arg))
        VEC_quick_push (tree, vargs, arg);
      call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
      VEC_free (tree, heap, vargs);
      gimple_call_set_cannot_inline (call, true);
      gimple_call_set_from_thunk (call, true);
      if (restmp)
        gimple_call_set_lhs (call, restmp);
      gsi_insert_after (&bsi, call, GSI_NEW_STMT);
      mark_symbols_for_renaming (call);
      find_referenced_vars_in (call);
      update_stmt (call);

      /* Result-adjusting thunk: adjust the returned pointer.  */
      if (restmp && !this_adjusting)
        {
          tree true_label = NULL_TREE;

          if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
            {
              gimple stmt;
              /* If the return type is a pointer, we need to
                 protect against NULL.  We know there will be an
                 adjustment, because that's why we're emitting a
                 thunk.  */
              then_bb = create_basic_block (NULL, (void *) 0, bb);
              return_bb = create_basic_block (NULL, (void *) 0, then_bb);
              else_bb = create_basic_block (NULL, (void *) 0, else_bb);
              remove_edge (single_succ_edge (bb));
              true_label = gimple_block_label (then_bb);
              stmt = gimple_build_cond (NE_EXPR, restmp,
                                        fold_convert (TREE_TYPE (restmp),
                                                      integer_zero_node),
                                        NULL_TREE, NULL_TREE);
              gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
              make_edge (bb, then_bb, EDGE_TRUE_VALUE);
              make_edge (bb, else_bb, EDGE_FALSE_VALUE);
              make_edge (return_bb, EXIT_BLOCK_PTR, 0);
              make_edge (then_bb, return_bb, EDGE_FALLTHRU);
              make_edge (else_bb, return_bb, EDGE_FALLTHRU);
              bsi = gsi_last_bb (then_bb);
            }

          restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
                                 fixed_offset, virtual_offset);
          if (true_label)
            {
              /* On the NULL branch, return NULL unadjusted.  */
              gimple stmt;
              bsi = gsi_last_bb (else_bb);
              stmt = gimple_build_assign (restmp, fold_convert (TREE_TYPE (restmp),
                                                                integer_zero_node));
              gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
              bsi = gsi_last_bb (return_bb);
            }
        }
      else
        gimple_call_set_tail (call, true);

      /* Build return value.  */
      ret = gimple_build_return (restmp);
      gsi_insert_after (&bsi, ret, GSI_NEW_STMT);

      delete_unreachable_blocks ();
      update_ssa (TODO_update_ssa);

      /* The thunk now has a real body; it is no longer a same-body alias.  */
      cgraph_remove_same_body_alias (node);
      /* Since we want to emit the thunk, we explicitly mark its name as
         referenced.  */
      mark_decl_referenced (thunk_fndecl);
      cgraph_add_new_function (thunk_fndecl, true);
      bitmap_obstack_release (NULL);
    }
  current_function_decl = NULL;
}
1557
 
1558
/* Expand function specified by NODE: generate RTL and assembly for its
   body, emit its same-body aliases and thunks, then release the gimple
   body and all outgoing call edges.  */

static void
cgraph_expand_function (struct cgraph_node *node)
{
  tree decl = node->decl;

  /* We ought to not compile any inline clones.  */
  gcc_assert (!node->global.inlined_to);

  announce_function (decl);
  node->process = 0;

  gcc_assert (node->lowered);

  /* Generate RTL for the body of DECL.  */
  tree_rest_of_compilation (decl);

  /* Make sure that BE didn't give up on compiling.  */
  gcc_assert (TREE_ASM_WRITTEN (decl));
  current_function_decl = NULL;
  if (node->same_body)
    {
      struct cgraph_node *alias, *next;
      bool saved_alias = node->alias;
      /* Find the tail of the same_body list...  */
      for (alias = node->same_body;
           alias && alias->next; alias = alias->next)
        ;
      /* ...then walk aliases in the order they were created; it is
         possible that thunks refer to the aliases made earlier.  */
      for (; alias; alias = next)
        {
          next = alias->previous;
          if (!alias->thunk.thunk_p)
            assemble_alias (alias->decl,
                            DECL_ASSEMBLER_NAME (alias->thunk.alias));
          else
            assemble_thunk (alias);
        }
      node->alias = saved_alias;
    }
  gcc_assert (!cgraph_preserve_function_body_p (decl));
  cgraph_release_function_body (node);
  /* Eliminate all call edges.  This is important so the GIMPLE_CALL no longer
     points to the dead function body.  */
  cgraph_node_remove_callees (node);

  cgraph_function_flags_ready = true;
}
1607
 
1608
/* Return true when CALLER_DECL should be inlined into CALLEE_DECL.  */
1609
 
1610
bool
1611
cgraph_inline_p (struct cgraph_edge *e, cgraph_inline_failed_t *reason)
1612
{
1613
  *reason = e->inline_failed;
1614
  return !e->inline_failed;
1615
}
1616
 
1617
 
1618
 
1619
/* Expand all functions that must be output.
1620
 
1621
   Attempt to topologically sort the nodes so function is output when
1622
   all called functions are already assembled to allow data to be
1623
   propagated across the callgraph.  Use a stack to get smaller distance
1624
   between a function and its callees (later we may choose to use a more
1625
   sophisticated algorithm for function reordering; we will likely want
1626
   to use subsections to make the output functions appear in top-down
1627
   order).  */
1628
 
1629
static void
1630
cgraph_expand_all_functions (void)
1631
{
1632
  struct cgraph_node *node;
1633
  struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1634
  int order_pos, new_order_pos = 0;
1635
  int i;
1636
 
1637
  order_pos = cgraph_postorder (order);
1638
  gcc_assert (order_pos == cgraph_n_nodes);
1639
 
1640
  /* Garbage collector may remove inline clones we eliminate during
1641
     optimization.  So we must be sure to not reference them.  */
1642
  for (i = 0; i < order_pos; i++)
1643
    if (order[i]->process)
1644
      order[new_order_pos++] = order[i];
1645
 
1646
  for (i = new_order_pos - 1; i >= 0; i--)
1647
    {
1648
      node = order[i];
1649
      if (node->process)
1650
        {
1651
          gcc_assert (node->reachable);
1652
          node->process = 0;
1653
          cgraph_expand_function (node);
1654
        }
1655
    }
1656
  cgraph_process_new_functions ();
1657
 
1658
  free (order);
1659
 
1660
}
1661
 
1662
/* This is used to sort the node types by the cgraph order number.
   Tags which member of cgraph_order_sort's union is valid.  */

enum cgraph_order_sort_kind
{
  ORDER_UNDEFINED = 0,  /* Slot not used by any entity.  */
  ORDER_FUNCTION,       /* Slot holds a cgraph (function) node.  */
  ORDER_VAR,            /* Slot holds a varpool (variable) node.  */
  ORDER_ASM             /* Slot holds a toplevel asm statement.  */
};
1671
 
1672
/* One slot of the output-order table used by cgraph_output_in_order;
   KIND selects which member of the union U is valid.  */
struct cgraph_order_sort
{
  enum cgraph_order_sort_kind kind;
  union
  {
    struct cgraph_node *f;      /* Valid when kind == ORDER_FUNCTION.  */
    struct varpool_node *v;     /* Valid when kind == ORDER_VAR.  */
    struct cgraph_asm_node *a;  /* Valid when kind == ORDER_ASM.  */
  } u;
};
1682
 
1683
/* Output all functions, variables, and asm statements in the order
   according to their order fields, which is the order in which they
   appeared in the file.  This implements -fno-toplevel-reorder.  In
   this mode we may output functions and variables which don't really
   need to be output.  */

static void
cgraph_output_in_order (void)
{
  int max;
  struct cgraph_order_sort *nodes;
  int i;
  struct cgraph_node *pf;
  struct varpool_node *pv;
  struct cgraph_asm_node *pa;

  /* Every entity carries a unique order number below cgraph_order, so
     a flat array indexed by that number suffices as the worklist.  */
  max = cgraph_order;
  nodes = XCNEWVEC (struct cgraph_order_sort, max);

  varpool_analyze_pending_decls ();

  /* Slot each function marked for output at its original position.  */
  for (pf = cgraph_nodes; pf; pf = pf->next)
    {
      if (pf->process)
        {
          i = pf->order;
          gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
          nodes[i].kind = ORDER_FUNCTION;
          nodes[i].u.f = pf;
        }
    }

  /* Likewise for queued variables ...  */
  for (pv = varpool_nodes_queue; pv; pv = pv->next_needed)
    {
      i = pv->order;
      gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
      nodes[i].kind = ORDER_VAR;
      nodes[i].u.v = pv;
    }

  /* ... and for toplevel asm statements.  */
  for (pa = cgraph_asm_nodes; pa; pa = pa->next)
    {
      i = pa->order;
      gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
      nodes[i].kind = ORDER_ASM;
      nodes[i].u.a = pa;
    }

  /* In toplevel reorder mode we output all statics; mark them as needed.  */
  for (i = 0; i < max; ++i)
    {
      if (nodes[i].kind == ORDER_VAR)
        {
          varpool_mark_needed_node (nodes[i].u.v);
        }
    }
  varpool_empty_needed_queue ();

  /* Emit everything in original file order.  */
  for (i = 0; i < max; ++i)
    {
      switch (nodes[i].kind)
        {
        case ORDER_FUNCTION:
          nodes[i].u.f->process = 0;
          cgraph_expand_function (nodes[i].u.f);
          break;

        case ORDER_VAR:
          varpool_assemble_decl (nodes[i].u.v);
          break;

        case ORDER_ASM:
          assemble_asm (nodes[i].u.a->asm_str);
          break;

        case ORDER_UNDEFINED:
          /* Unused slot; nothing was registered at this order number.  */
          break;

        default:
          gcc_unreachable ();
        }
    }

  /* All pending asms have been emitted above.  */
  cgraph_asm_nodes = NULL;
  free (nodes);
}
1769
 
1770
/* Return true when function body of DECL still needs to be kept around
1771
   for later re-use.  */
1772
bool
1773
cgraph_preserve_function_body_p (tree decl)
1774
{
1775
  struct cgraph_node *node;
1776
 
1777
  gcc_assert (cgraph_global_info_ready);
1778
  /* Look if there is any clone around.  */
1779
  node = cgraph_node (decl);
1780
  if (node->clones)
1781
    return true;
1782
  return false;
1783
}
1784
 
1785
/* Run the inter-procedural analysis and optimization passes over the
   whole compilation unit, including the LTO summary generation and
   stream-out steps when LTO is enabled.  */

static void
ipa_passes (void)
{
  /* IPA passes operate on the whole program, not on one function.  */
  set_cfun (NULL);
  current_function_decl = NULL;
  gimple_register_cfg_hooks ();
  bitmap_obstack_initialize (NULL);

  invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);

  /* The small IPA passes are skipped when reading LTO input --
     presumably they were already run before the summaries were
     streamed out.  */
  if (!in_lto_p)
    execute_ipa_pass_list (all_small_ipa_passes);

  /* If pass_all_early_optimizations was not scheduled, the state of
     the cgraph will not be properly updated.  Update it now.  */
  if (cgraph_state < CGRAPH_STATE_IPA_SSA)
    cgraph_state = CGRAPH_STATE_IPA_SSA;

  if (!in_lto_p)
    {
      /* Generate coverage variables and constructors.  */
      coverage_finish ();

      /* Process new functions added.  */
      set_cfun (NULL);
      current_function_decl = NULL;
      cgraph_process_new_functions ();

      execute_ipa_summary_passes
        ((struct ipa_opt_pass_d *) all_regular_ipa_passes);
    }

  /* Some targets need to handle LTO assembler output specially.  */
  if (flag_generate_lto)
    targetm.asm_out.lto_start ();

  execute_ipa_summary_passes ((struct ipa_opt_pass_d *) all_lto_gen_passes);

  /* Write summaries out only when we are producing them, not when we
     already read them back in.  */
  if (!in_lto_p)
    ipa_write_summaries ();

  if (flag_generate_lto)
    targetm.asm_out.lto_end ();

  /* In the -fltrans stage of WHOPR the regular IPA passes are not
     repeated -- NOTE(review): presumably performed at WPA time.  */
  if (!flag_ltrans)
    execute_ipa_pass_list (all_regular_ipa_passes);
  invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);

  bitmap_obstack_release (NULL);
}
1835
 
1836
 
1837
/* Perform simple optimizations based on callgraph.  This is the main
   driver run once the whole unit has been finalized: it runs the IPA
   passes, decides what must be output, and expands every needed
   function, variable and asm statement.  */

void
cgraph_optimize (void)
{
  if (errorcount || sorrycount)
    return;

#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif

  /* Frontend may output common variables after the unit has been finalized.
     It is safe to deal with them here as they are always zero initialized.  */
  varpool_analyze_pending_decls ();

  timevar_push (TV_CGRAPHOPT);
  if (pre_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption before IPA\n");
      dump_memory_report (false);
    }
  if (!quiet_flag)
    fprintf (stderr, "Performing interprocedural optimizations\n");
  cgraph_state = CGRAPH_STATE_IPA;

  /* Don't run the IPA passes if there was any error or sorry messages.  */
  if (errorcount == 0 && sorrycount == 0)
    ipa_passes ();

  /* Do nothing else if any IPA pass found errors.  */
  if (errorcount || sorrycount)
    {
      timevar_pop (TV_CGRAPHOPT);
      return;
    }

  /* This pass removes bodies of extern inline functions we never inlined.
     Do this later so other IPA passes see what is really going on.  */
  cgraph_remove_unreachable_nodes (false, dump_file);
  cgraph_global_info_ready = true;
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Optimized ");
      dump_cgraph (cgraph_dump_file);
      dump_varpool (cgraph_dump_file);
    }
  if (post_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption after IPA\n");
      dump_memory_report (false);
    }
  timevar_pop (TV_CGRAPHOPT);

  /* Output everything.  */
  (*debug_hooks->assembly_start) ();
  if (!quiet_flag)
    fprintf (stderr, "Assembling functions:\n");
#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif

  /* Turn virtual clones into real function bodies before deciding
     what to emit.  */
  cgraph_materialize_all_clones ();
  cgraph_mark_functions_to_output ();

  cgraph_state = CGRAPH_STATE_EXPANSION;
  /* -fno-toplevel-reorder requires source order; otherwise expand in a
     convenient order.  */
  if (!flag_toplevel_reorder)
    cgraph_output_in_order ();
  else
    {
      cgraph_output_pending_asms ();

      cgraph_expand_all_functions ();
      varpool_remove_unreferenced_decls ();

      varpool_assemble_pending_decls ();
    }
  cgraph_process_new_functions ();
  cgraph_state = CGRAPH_STATE_FINISHED;

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "\nFinal ");
      dump_cgraph (cgraph_dump_file);
    }
#ifdef ENABLE_CHECKING
  verify_cgraph ();
  /* Double check that all inline clones are gone and that all
     function bodies have been released from memory.  */
  if (!(sorrycount || errorcount))
    {
      struct cgraph_node *node;
      bool error_found = false;

      for (node = cgraph_nodes; node; node = node->next)
        if (node->analyzed
            && (node->global.inlined_to
                || gimple_has_body_p (node->decl)))
          {
            error_found = true;
            dump_cgraph_node (stderr, node);
          }
      if (error_found)
        internal_error ("nodes with unreleased memory found");
    }
#endif
}
1944
 
1945
 
1946
/* Generate and emit a static constructor or destructor.  WHICH must
   be one of 'I' (for a constructor) or 'D' (for a destructor).  BODY
   is a STATEMENT_LIST containing GENERIC statements.  PRIORITY is the
   initialization priority for this constructor or destructor.  */

void
cgraph_build_static_cdtor (char which, tree body, int priority)
{
  static int counter = 0;
  char which_buf[16];
  tree decl, name, resdecl;

  /* The priority is encoded in the constructor or destructor name.
     collect2 will sort the names and arrange that they are called at
     program startup.
     NOTE(review): which_buf is 16 bytes; "%c_%.5d_%d" stays in bounds
     only while PRIORITY needs at most 5 digits and COUNTER at most 7 --
     confirm PRIORITY is bounded (e.g. by MAX_INIT_PRIORITY).  */
  sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
  name = get_file_function_name (which_buf);

  /* Build a "void cdtor (void)" FUNCTION_DECL.  */
  decl = build_decl (input_location, FUNCTION_DECL, name,
                     build_function_type (void_type_node, void_list_node));
  current_function_decl = decl;

  resdecl = build_decl (input_location,
                        RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (resdecl) = 1;
  DECL_RESULT (decl) = resdecl;
  DECL_CONTEXT (resdecl) = decl;

  allocate_struct_function (decl, false);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
  DECL_SAVED_TREE (decl) = body;
  /* Without target ctor/dtor support the function must stay visible so
     collect2 can find it by name.  */
  if (!targetm.have_ctors_dtors)
    {
      TREE_PUBLIC (decl) = 1;
      DECL_PRESERVE_P (decl) = 1;
    }
  DECL_UNINLINABLE (decl) = 1;

  DECL_INITIAL (decl) = make_node (BLOCK);
  TREE_USED (DECL_INITIAL (decl)) = 1;

  DECL_SOURCE_LOCATION (decl) = input_location;
  cfun->function_end_locus = input_location;

  /* Flag the declaration appropriately and record its priority.  */
  switch (which)
    {
    case 'I':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      decl_init_priority_insert (decl, priority);
      break;
    case 'D':
      DECL_STATIC_DESTRUCTOR (decl) = 1;
      decl_fini_priority_insert (decl, priority);
      break;
    default:
      gcc_unreachable ();
    }

  /* Lower the GENERIC body to GIMPLE and hand the function over to the
     callgraph for compilation.  */
  gimplify_function_tree (decl);

  cgraph_add_new_function (decl, false);
  cgraph_mark_needed_node (cgraph_node (decl));
  set_cfun (NULL);
}
2014
 
2015
/* Initialize the callgraph module: open the cgraph dump file if the
   user requested it.  */

void
init_cgraph (void)
{
  cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
}
2020
 
2021
/* The edges representing the callers of the NEW_VERSION node were
2022
   fixed by cgraph_function_versioning (), now the call_expr in their
2023
   respective tree code should be updated to call the NEW_VERSION.  */
2024
 
2025
static void
2026
update_call_expr (struct cgraph_node *new_version)
2027
{
2028
  struct cgraph_edge *e;
2029
 
2030
  gcc_assert (new_version);
2031
 
2032
  /* Update the call expr on the edges to call the new version.  */
2033
  for (e = new_version->callers; e; e = e->next_caller)
2034
    {
2035
      struct function *inner_function = DECL_STRUCT_FUNCTION (e->caller->decl);
2036
      gimple_call_set_fndecl (e->call_stmt, new_version->decl);
2037
      maybe_clean_eh_stmt_fn (inner_function, e->call_stmt);
2038
    }
2039
}
2040
 
2041
 
2042
/* Create a new cgraph node which is the new version of
2043
   OLD_VERSION node.  REDIRECT_CALLERS holds the callers
2044
   edges which should be redirected to point to
2045
   NEW_VERSION.  ALL the callees edges of OLD_VERSION
2046
   are cloned to the new version node.  Return the new
2047
   version node.  */
2048
 
2049
static struct cgraph_node *
2050
cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
2051
                                 tree new_decl,
2052
                                 VEC(cgraph_edge_p,heap) *redirect_callers)
2053
 {
2054
   struct cgraph_node *new_version;
2055
   struct cgraph_edge *e, *new_e;
2056
   struct cgraph_edge *next_callee;
2057
   unsigned i;
2058
 
2059
   gcc_assert (old_version);
2060
 
2061
   new_version = cgraph_node (new_decl);
2062
 
2063
   new_version->analyzed = true;
2064
   new_version->local = old_version->local;
2065
   new_version->global = old_version->global;
2066
   new_version->rtl = new_version->rtl;
2067
   new_version->reachable = true;
2068
   new_version->count = old_version->count;
2069
 
2070
   /* Clone the old node callees.  Recursive calls are
2071
      also cloned.  */
2072
   for (e = old_version->callees;e; e=e->next_callee)
2073
     {
2074
       new_e = cgraph_clone_edge (e, new_version, e->call_stmt,
2075
                                  e->lto_stmt_uid, 0, e->frequency,
2076
                                  e->loop_nest, true);
2077
       new_e->count = e->count;
2078
     }
2079
   /* Fix recursive calls.
2080
      If OLD_VERSION has a recursive call after the
2081
      previous edge cloning, the new version will have an edge
2082
      pointing to the old version, which is wrong;
2083
      Redirect it to point to the new version. */
2084
   for (e = new_version->callees ; e; e = next_callee)
2085
     {
2086
       next_callee = e->next_callee;
2087
       if (e->callee == old_version)
2088
         cgraph_redirect_edge_callee (e, new_version);
2089
 
2090
       if (!next_callee)
2091
         break;
2092
     }
2093
   for (i = 0; VEC_iterate (cgraph_edge_p, redirect_callers, i, e); i++)
2094
     {
2095
       /* Redirect calls to the old version node to point to its new
2096
          version.  */
2097
       cgraph_redirect_edge_callee (e, new_version);
2098
     }
2099
 
2100
   return new_version;
2101
 }
2102
 
2103
/* Perform function versioning.
   Function versioning includes copying of the tree and
   a callgraph update (creating a new cgraph node and updating
   its callees and callers).

   REDIRECT_CALLERS varray includes the edges to be redirected
   to the new version.

   TREE_MAP is a mapping of tree nodes we want to replace with
   new ones (according to results of prior analysis).
   OLD_VERSION_NODE is the node that is versioned.
   It returns the new version's cgraph node.
   ARGS_TO_SKIP lists arguments to be omitted from the new version's
   parameter list (NULL to keep them all).  */

struct cgraph_node *
cgraph_function_versioning (struct cgraph_node *old_version_node,
                            VEC(cgraph_edge_p,heap) *redirect_callers,
                            VEC (ipa_replace_map_p,gc)* tree_map,
                            bitmap args_to_skip)
{
  tree old_decl = old_version_node->decl;
  struct cgraph_node *new_version_node = NULL;
  tree new_decl;

  /* Bail out when the function cannot safely be versioned.  */
  if (!tree_versionable_function_p (old_decl))
    return NULL;

  /* Make a new FUNCTION_DECL tree node for the
     new version.  */
  if (!args_to_skip)
    new_decl = copy_node (old_decl);
  else
    new_decl = build_function_decl_skip_args (old_decl, args_to_skip);

  /* Generate a new name for the new version.  */
  DECL_NAME (new_decl) = clone_function_name (old_decl);
  SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
  SET_DECL_RTL (new_decl, NULL);

  /* Create the new version's call-graph node
     and update the edges of the new node.  */
  new_version_node =
    cgraph_copy_node_for_versioning (old_version_node, new_decl,
                                     redirect_callers);

  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (old_decl, new_decl, tree_map, false, args_to_skip);

  /* Update the new version's properties.
     Make the new version visible only within this translation unit.  Make sure
     that it is not weak also.
     ??? We cannot use COMDAT linkage because there is no
     ABI support for this.  */
  cgraph_make_decl_local (new_version_node->decl);
  DECL_VIRTUAL_P (new_version_node->decl) = 0;
  new_version_node->local.externally_visible = 0;
  new_version_node->local.local = 1;
  new_version_node->lowered = true;

  /* Update the call_expr on the edges to call the new version node.  */
  update_call_expr (new_version_node);

  /* Let registered IPA hooks know about the newly inserted function.  */
  cgraph_call_function_insertion_hooks (new_version_node);
  return new_version_node;
}
2169
 
2170
/* Produce separate function body for inline clones so the offline copy can be
   modified without affecting them.  Returns the clone node that now owns
   the saved body.  */
struct cgraph_node *
save_inline_function_body (struct cgraph_node *node)
{
  struct cgraph_node *first_clone, *n;

  gcc_assert (node == cgraph_node (node->decl));

  cgraph_lower_function (node);

  first_clone = node->clones;

  /* Give FIRST_CLONE its own FUNCTION_DECL; it will hold the saved body
     while NODE keeps the offline copy that may be modified.  */
  first_clone->decl = copy_node (node->decl);
  cgraph_insert_node_to_hashtable (first_clone);
  gcc_assert (first_clone == cgraph_node (first_clone->decl));
  if (first_clone->next_sibling_clone)
    {
      /* Re-parent the remaining sibling clones of NODE under
         FIRST_CLONE, splicing them onto its clone list.  */
      for (n = first_clone->next_sibling_clone; n->next_sibling_clone; n = n->next_sibling_clone)
        n->clone_of = first_clone;
      n->clone_of = first_clone;
      n->next_sibling_clone = first_clone->clones;
      if (first_clone->clones)
        first_clone->clones->prev_sibling_clone = n;
      first_clone->clones = first_clone->next_sibling_clone;
      first_clone->next_sibling_clone->prev_sibling_clone = NULL;
      first_clone->next_sibling_clone = NULL;
      gcc_assert (!first_clone->prev_sibling_clone);
    }
  first_clone->clone_of = NULL;
  node->clones = NULL;

  /* Walk the whole clone tree below FIRST_CLONE (iterative pre-order
     traversal via clones/next_sibling_clone/clone_of links) and point
     every clone at the new declaration.  */
  if (first_clone->clones)
    for (n = first_clone->clones; n != first_clone;)
      {
        gcc_assert (n->decl == node->decl);
        n->decl = first_clone->decl;
        if (n->clones)
          n = n->clones;
        else if (n->next_sibling_clone)
          n = n->next_sibling_clone;
        else
          {
            /* Climb back up until a node with an unvisited sibling is
               found, or the root is reached.  */
            while (n != first_clone && !n->next_sibling_clone)
              n = n->clone_of;
            if (n != first_clone)
              n = n->next_sibling_clone;
          }
      }

  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (node->decl, first_clone->decl, NULL, true, NULL);

  /* The saved body is private to this translation unit: clear every
     flag that would export or share it.  */
  DECL_EXTERNAL (first_clone->decl) = 0;
  DECL_COMDAT_GROUP (first_clone->decl) = NULL_TREE;
  TREE_PUBLIC (first_clone->decl) = 0;
  DECL_COMDAT (first_clone->decl) = 0;
  VEC_free (ipa_opt_pass, heap,
            first_clone->ipa_transforms_to_apply);
  first_clone->ipa_transforms_to_apply = NULL;

#ifdef ENABLE_CHECKING
  verify_cgraph_node (first_clone);
#endif
  return first_clone;
}
2236
 
2237
/* Given virtual clone, turn it into actual clone: copy the body of the
   clone origin into NODE's declaration, applying the recorded
   parameter replacements and removals, then unlink NODE from the
   clone tree.  */
static void
cgraph_materialize_clone (struct cgraph_node *node)
{
  bitmap_obstack_initialize (NULL);
  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (node->clone_of->decl, node->decl,
                            node->clone.tree_map, true,
                            node->clone.args_to_skip);
  if (cgraph_dump_file)
    {
      dump_function_to_file (node->clone_of->decl, cgraph_dump_file, dump_flags);
      dump_function_to_file (node->decl, cgraph_dump_file, dump_flags);
    }

  /* Function is no longer clone: unlink NODE from its siblings ...  */
  if (node->next_sibling_clone)
    node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
  if (node->prev_sibling_clone)
    node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
  else
    node->clone_of->clones = node->next_sibling_clone;
  node->next_sibling_clone = NULL;
  node->prev_sibling_clone = NULL;
  /* ... and remove the origin when it has neither a body nor remaining
     clones.  */
  if (!node->clone_of->analyzed && !node->clone_of->clones)
    cgraph_remove_node (node->clone_of);
  node->clone_of = NULL;
  bitmap_obstack_release (NULL);
}
2266
 
2267
/* If necessary, change the function declaration in the call statement
   associated with E so that it corresponds to the edge callee.
   Returns the (possibly new) call statement.  */

gimple
cgraph_redirect_edge_call_stmt_to_callee (struct cgraph_edge *e)
{
  tree decl = gimple_call_fndecl (e->call_stmt);
  gimple new_stmt;

  /* Nothing to do for indirect calls or calls already pointing at the
     callee's declaration.  */
  if (!decl || decl == e->callee->decl
      /* Don't update call from same body alias to the real function.  */
      || cgraph_get_node (decl) == cgraph_get_node (e->callee->decl))
    return e->call_stmt;

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "updating call of %s/%i -> %s/%i: ",
               cgraph_node_name (e->caller), e->caller->uid,
               cgraph_node_name (e->callee), e->callee->uid);
      print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
    }

  /* When the callee is a clone with parameters removed, rebuild the
     call statement without the skipped arguments.  */
  if (e->callee->clone.combined_args_to_skip)
    {
      gimple_stmt_iterator gsi;

      new_stmt
        = gimple_call_copy_skip_args (e->call_stmt,
                                      e->callee->clone.combined_args_to_skip);

      /* The copy now owns the virtual definition of the original.  */
      if (gimple_vdef (new_stmt)
          && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
        SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;

      gsi = gsi_for_stmt (e->call_stmt);
      gsi_replace (&gsi, new_stmt, true);
    }
  else
    new_stmt = e->call_stmt;

  gimple_call_set_fndecl (new_stmt, e->callee->decl);

  /* Keep the clone tree in sync with the statement replacement.  */
  cgraph_set_call_stmt_including_clones (e->caller, e->call_stmt, new_stmt);

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "  updated to:");
      print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
    }
  return new_stmt;
}
2318
 
2319
/* Once all functions from compilation unit are in memory, produce all clones
2320
   and update all calls.  We might also do this on demand if we don't want to
2321
   bring all functions to memory prior compilation, but current WHOPR
2322
   implementation does that and it is is bit easier to keep everything right in
2323
   this order.  */
2324
void
2325
cgraph_materialize_all_clones (void)
2326
{
2327
  struct cgraph_node *node;
2328
  bool stabilized = false;
2329
 
2330
  if (cgraph_dump_file)
2331
    fprintf (cgraph_dump_file, "Materializing clones\n");
2332
#ifdef ENABLE_CHECKING
2333
  verify_cgraph ();
2334
#endif
2335
 
2336
  /* We can also do topological order, but number of iterations should be
2337
     bounded by number of IPA passes since single IPA pass is probably not
2338
     going to create clones of clones it created itself.  */
2339
  while (!stabilized)
2340
    {
2341
      stabilized = true;
2342
      for (node = cgraph_nodes; node; node = node->next)
2343
        {
2344
          if (node->clone_of && node->decl != node->clone_of->decl
2345
              && !gimple_has_body_p (node->decl))
2346
            {
2347
              if (gimple_has_body_p (node->clone_of->decl))
2348
                {
2349
                  if (cgraph_dump_file)
2350
                    {
2351
                      fprintf (cgraph_dump_file, "clonning %s to %s\n",
2352
                               cgraph_node_name (node->clone_of),
2353
                               cgraph_node_name (node));
2354
                      if (node->clone.tree_map)
2355
                        {
2356
                          unsigned int i;
2357
                          fprintf (cgraph_dump_file, "   replace map: ");
2358
                          for (i = 0; i < VEC_length (ipa_replace_map_p,
2359
                                                      node->clone.tree_map);
2360
                                                      i++)
2361
                            {
2362
                              struct ipa_replace_map *replace_info;
2363
                              replace_info = VEC_index (ipa_replace_map_p,
2364
                                                        node->clone.tree_map,
2365
                                                        i);
2366
                              print_generic_expr (cgraph_dump_file, replace_info->old_tree, 0);
2367
                              fprintf (cgraph_dump_file, " -> ");
2368
                              print_generic_expr (cgraph_dump_file, replace_info->new_tree, 0);
2369
                              fprintf (cgraph_dump_file, "%s%s;",
2370
                                       replace_info->replace_p ? "(replace)":"",
2371
                                       replace_info->ref_p ? "(ref)":"");
2372
                            }
2373
                          fprintf (cgraph_dump_file, "\n");
2374
                        }
2375
                      if (node->clone.args_to_skip)
2376
                        {
2377
                          fprintf (cgraph_dump_file, "   args_to_skip: ");
2378
                          dump_bitmap (cgraph_dump_file, node->clone.args_to_skip);
2379
                        }
2380
                      if (node->clone.args_to_skip)
2381
                        {
2382
                          fprintf (cgraph_dump_file, "   combined_args_to_skip:");
2383
                          dump_bitmap (cgraph_dump_file, node->clone.combined_args_to_skip);
2384
                        }
2385
                    }
2386
                  cgraph_materialize_clone (node);
2387
                }
2388
              else
2389
                stabilized = false;
2390
            }
2391
        }
2392
    }
2393
  for (node = cgraph_nodes; node; node = node->next)
2394
    if (!node->analyzed && node->callees)
2395
      cgraph_node_remove_callees (node);
2396
  if (cgraph_dump_file)
2397
    fprintf (cgraph_dump_file, "Updating call sites\n");
2398
  for (node = cgraph_nodes; node; node = node->next)
2399
    if (node->analyzed && !node->clone_of
2400
        && gimple_has_body_p (node->decl))
2401
      {
2402
        struct cgraph_edge *e;
2403
 
2404
        current_function_decl = node->decl;
2405
        push_cfun (DECL_STRUCT_FUNCTION (node->decl));
2406
        for (e = node->callees; e; e = e->next_callee)
2407
          cgraph_redirect_edge_call_stmt_to_callee (e);
2408
        pop_cfun ();
2409
        current_function_decl = NULL;
2410
#ifdef ENABLE_CHECKING
2411
        verify_cgraph_node (node);
2412
#endif
2413
      }
2414
  if (cgraph_dump_file)
2415
    fprintf (cgraph_dump_file, "Materialization Call site updates done.\n");
2416
  /* All changes to parameters have been performed.  In order not to
2417
     incorrectly repeat them, we simply dispose of the bitmaps that drive the
2418
     changes. */
2419
  for (node = cgraph_nodes; node; node = node->next)
2420
    node->clone.combined_args_to_skip = NULL;
2421
#ifdef ENABLE_CHECKING
2422
  verify_cgraph ();
2423
#endif
2424
  cgraph_remove_unreachable_nodes (false, cgraph_dump_file);
2425
}
2426
 
2427
#include "gt-cgraphunit.h"

powered by: WebSVN 2.1.0

© copyright 1999-2024 OpenCores.org, equivalent to Oliscience, all rights reserved. OpenCores®, registered trademark.