Source: OpenCores SVN repository "scarts" (https://opencores.org/ocsvn/scarts/scarts/trunk)
Path: scarts/trunk/toolchain/scarts-gcc/gcc-4.1.1/gcc/cgraph.c — blame view for rev 20.

/* Callgraph handling code.
   Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.
   Contributed by Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

/*  This file contains basic routines manipulating the call graph and the
    variable pool.

The callgraph:

    The call-graph is a data structure designed for intra-procedural
    optimization, but it is also used in non-unit-at-a-time compilation to
    allow easier code sharing.

    The call-graph consists of nodes and edges represented via linked lists.
    Each function (external or not) corresponds to a unique node (in
    contrast to tree DECL nodes, where we can have multiple nodes for each
    function).

    The mapping from declarations to call-graph nodes is done using a hash
    table based on DECL_ASSEMBLER_NAME, so it is essential for the assembler
    name to not change once the declaration is inserted into the call-graph.
    The call-graph nodes are created lazily by the cgraph_node function when
    called for an unknown declaration.

    When built, there is one edge for each direct call.  It is possible that
    the reference will be later optimized out.  The call-graph is built
    conservatively in order to make conservative data flow analysis possible.

    The callgraph at the moment does not represent indirect calls or calls
    from other compilation units.  Flag NEEDED is set for each node that may
    be accessed in such an invisible way, and it shall be considered an
    entry point to the callgraph.

    Intraprocedural information:

      The callgraph is the place to store data needed for intraprocedural
      optimization.  All data structures are divided into three components:
      local_info, which is produced while analyzing the function; global_info,
      which is the result of a global walk of the callgraph at the end of
      compilation; and rtl_info, used by the RTL backend to propagate data
      from already compiled functions to their callers.

    Inlining plans:

      The function inlining information is decided in advance and maintained
      in the callgraph as a so-called inline plan.
      For each inlined call, the callee's node is cloned to represent the
      new function copy produced by the inliner.
      Each inlined call gets a unique corresponding clone node of the callee,
      and the data structure is updated while inlining is performed, so
      the clones are eliminated and their callee edges redirected to the
      caller.

      Each edge has an "inline_failed" field.  When the field is set to NULL,
      the call will be inlined.  When it is non-NULL it contains a reason
      why inlining wasn't performed.


The varpool data structure:

    The varpool is used to maintain variables in a similar manner as the
    call-graph is used for functions.  Most of the API is symmetric,
    replacing the cgraph function prefix by cgraph_varpool.  */
80
 
81
 
82
#include "config.h"
83
#include "system.h"
84
#include "coretypes.h"
85
#include "tm.h"
86
#include "tree.h"
87
#include "tree-inline.h"
88
#include "langhooks.h"
89
#include "hashtab.h"
90
#include "toplev.h"
91
#include "flags.h"
92
#include "ggc.h"
93
#include "debug.h"
94
#include "target.h"
95
#include "basic-block.h"
96
#include "cgraph.h"
97
#include "varray.h"
98
#include "output.h"
99
#include "intl.h"
100
#include "tree-gimple.h"
101
#include "tree-dump.h"
102
 
103
static void cgraph_node_remove_callers (struct cgraph_node *node);
104
static inline void cgraph_edge_remove_caller (struct cgraph_edge *e);
105
static inline void cgraph_edge_remove_callee (struct cgraph_edge *e);
106
 
107
/* Hash table used to convert declarations into nodes.  */
108
static GTY((param_is (struct cgraph_node))) htab_t cgraph_hash;
109
 
110
/* The linked list of cgraph nodes.  */
111
struct cgraph_node *cgraph_nodes;
112
 
113
/* Queue of cgraph nodes scheduled to be lowered.  */
114
struct cgraph_node *cgraph_nodes_queue;
115
 
116
/* Number of nodes in existence.  */
117
int cgraph_n_nodes;
118
 
119
/* Maximal uid used in cgraph nodes.  */
120
int cgraph_max_uid;
121
 
122
/* Set when whole unit has been analyzed so we can access global info.  */
123
bool cgraph_global_info_ready = false;
124
 
125
/* Set when the cgraph is fully build and the basic flags are computed.  */
126
bool cgraph_function_flags_ready = false;
127
 
128
/* Hash table used to convert declarations into nodes.  */
129
static GTY((param_is (struct cgraph_varpool_node))) htab_t cgraph_varpool_hash;
130
 
131
/* Queue of cgraph nodes scheduled to be lowered and output.  */
132
struct cgraph_varpool_node *cgraph_varpool_nodes_queue, *cgraph_varpool_first_unanalyzed_node;
133
 
134
 
135
/* The linked list of cgraph varpool nodes.  */
136
static GTY(()) struct cgraph_varpool_node *cgraph_varpool_nodes;
137
 
138
/* End of the varpool queue.  Needs to be QTYed to work with PCH.  */
139
static GTY(()) struct cgraph_varpool_node *cgraph_varpool_last_needed_node;
140
 
141
static hashval_t hash_node (const void *);
142
static int eq_node (const void *, const void *);
143
 
144
/* Returns a hash code for P.  */
145
 
146
static hashval_t
147
hash_node (const void *p)
148
{
149
  const struct cgraph_node *n = p;
150
  return (hashval_t) DECL_UID (n->decl);
151
}
152
 
153
/* Returns nonzero if P1 and P2 are equal.  */
154
 
155
static int
156
eq_node (const void *p1, const void *p2)
157
{
158
  const struct cgraph_node *n1 = p1, *n2 = p2;
159
  return DECL_UID (n1->decl) == DECL_UID (n2->decl);
160
}
161
 
162
/* Allocate new callgraph node and insert it into basic data structures.  */
163
static struct cgraph_node *
164
cgraph_create_node (void)
165
{
166
  struct cgraph_node *node;
167
 
168
  node = ggc_alloc_cleared (sizeof (*node));
169
  node->next = cgraph_nodes;
170
  node->uid = cgraph_max_uid++;
171
  if (cgraph_nodes)
172
    cgraph_nodes->previous = node;
173
  node->previous = NULL;
174
  node->global.estimated_growth = INT_MIN;
175
  cgraph_nodes = node;
176
  cgraph_n_nodes++;
177
  return node;
178
}
179
 
180
/* Return cgraph node assigned to DECL.  Create new one when needed.  */
181
struct cgraph_node *
182
cgraph_node (tree decl)
183
{
184
  struct cgraph_node key, *node, **slot;
185
 
186
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
187
 
188
  if (!cgraph_hash)
189
    cgraph_hash = htab_create_ggc (10, hash_node, eq_node, NULL);
190
 
191
  key.decl = decl;
192
 
193
  slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key, INSERT);
194
 
195
  if (*slot)
196
    {
197
      node = *slot;
198
      if (!node->master_clone)
199
        node->master_clone = node;
200
      return node;
201
    }
202
 
203
  node = cgraph_create_node ();
204
  node->decl = decl;
205
  *slot = node;
206
  if (DECL_CONTEXT (decl) && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)
207
    {
208
      node->origin = cgraph_node (DECL_CONTEXT (decl));
209
      node->next_nested = node->origin->nested;
210
      node->origin->nested = node;
211
      node->master_clone = node;
212
    }
213
  return node;
214
}
215
 
216
/* Compare ASMNAME with the DECL_ASSEMBLER_NAME of DECL.  */
217
 
218
static bool
219
decl_assembler_name_equal (tree decl, tree asmname)
220
{
221
  tree decl_asmname = DECL_ASSEMBLER_NAME (decl);
222
 
223
  if (decl_asmname == asmname)
224
    return true;
225
 
226
  /* If the target assembler name was set by the user, things are trickier.
227
     We have a leading '*' to begin with.  After that, it's arguable what
228
     is the correct thing to do with -fleading-underscore.  Arguably, we've
229
     historically been doing the wrong thing in assemble_alias by always
230
     printing the leading underscore.  Since we're not changing that, make
231
     sure user_label_prefix follows the '*' before matching.  */
232
  if (IDENTIFIER_POINTER (decl_asmname)[0] == '*')
233
    {
234
      const char *decl_str = IDENTIFIER_POINTER (decl_asmname) + 1;
235
      size_t ulp_len = strlen (user_label_prefix);
236
 
237
      if (ulp_len == 0)
238
        ;
239
      else if (strncmp (decl_str, user_label_prefix, ulp_len) == 0)
240
        decl_str += ulp_len;
241
      else
242
        return false;
243
 
244
      return strcmp (decl_str, IDENTIFIER_POINTER (asmname)) == 0;
245
    }
246
 
247
  return false;
248
}
249
 
250
 
251
/* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME.
252
   Return NULL if there's no such node.  */
253
 
254
struct cgraph_node *
255
cgraph_node_for_asm (tree asmname)
256
{
257
  struct cgraph_node *node;
258
 
259
  for (node = cgraph_nodes; node ; node = node->next)
260
    if (decl_assembler_name_equal (node->decl, asmname))
261
      return node;
262
 
263
  return NULL;
264
}
265
 
266
/* Return callgraph edge representing CALL_EXPR statement.  */
267
struct cgraph_edge *
268
cgraph_edge (struct cgraph_node *node, tree call_stmt)
269
{
270
  struct cgraph_edge *e;
271
 
272
  /* This loop may turn out to be performance problem.  In such case adding
273
     hashtables into call nodes with very many edges is probably best
274
     solution.  It is not good idea to add pointer into CALL_EXPR itself
275
     because we want to make possible having multiple cgraph nodes representing
276
     different clones of the same body before the body is actually cloned.  */
277
  for (e = node->callees; e; e= e->next_callee)
278
    if (e->call_stmt == call_stmt)
279
      break;
280
  return e;
281
}
282
 
283
/* Create edge from CALLER to CALLEE in the cgraph.  */
284
 
285
struct cgraph_edge *
286
cgraph_create_edge (struct cgraph_node *caller, struct cgraph_node *callee,
287
                    tree call_stmt, gcov_type count, int nest)
288
{
289
  struct cgraph_edge *edge = ggc_alloc (sizeof (struct cgraph_edge));
290
#ifdef ENABLE_CHECKING
291
  struct cgraph_edge *e;
292
 
293
  for (e = caller->callees; e; e = e->next_callee)
294
    gcc_assert (e->call_stmt != call_stmt);
295
#endif
296
 
297
  gcc_assert (get_call_expr_in (call_stmt));
298
 
299
  if (!DECL_SAVED_TREE (callee->decl))
300
    edge->inline_failed = N_("function body not available");
301
  else if (callee->local.redefined_extern_inline)
302
    edge->inline_failed = N_("redefined extern inline functions are not "
303
                             "considered for inlining");
304
  else if (callee->local.inlinable)
305
    edge->inline_failed = N_("function not considered for inlining");
306
  else
307
    edge->inline_failed = N_("function not inlinable");
308
 
309
  edge->aux = NULL;
310
 
311
  edge->caller = caller;
312
  edge->callee = callee;
313
  edge->call_stmt = call_stmt;
314
  edge->prev_caller = NULL;
315
  edge->next_caller = callee->callers;
316
  if (callee->callers)
317
    callee->callers->prev_caller = edge;
318
  edge->prev_callee = NULL;
319
  edge->next_callee = caller->callees;
320
  if (caller->callees)
321
    caller->callees->prev_callee = edge;
322
  caller->callees = edge;
323
  callee->callers = edge;
324
  edge->count = count;
325
  edge->loop_nest = nest;
326
  return edge;
327
}
328
 
329
/* Remove the edge E from the list of the callers of the callee.  */
330
 
331
static inline void
332
cgraph_edge_remove_callee (struct cgraph_edge *e)
333
{
334
  if (e->prev_caller)
335
    e->prev_caller->next_caller = e->next_caller;
336
  if (e->next_caller)
337
    e->next_caller->prev_caller = e->prev_caller;
338
  if (!e->prev_caller)
339
    e->callee->callers = e->next_caller;
340
}
341
 
342
/* Remove the edge E from the list of the callees of the caller.  */
343
 
344
static inline void
345
cgraph_edge_remove_caller (struct cgraph_edge *e)
346
{
347
  if (e->prev_callee)
348
    e->prev_callee->next_callee = e->next_callee;
349
  if (e->next_callee)
350
    e->next_callee->prev_callee = e->prev_callee;
351
  if (!e->prev_callee)
352
    e->caller->callees = e->next_callee;
353
}
354
 
355
/* Remove edge E from the cgraph by unlinking it from both the callers
   list of its callee and the callees list of its caller.  */

void
cgraph_remove_edge (struct cgraph_edge *e)
{
  cgraph_edge_remove_callee (e);
  cgraph_edge_remove_caller (e);
}
366
 
367
/* Redirect callee of E to N.  The function does not update underlying
368
   call expression.  */
369
 
370
void
371
cgraph_redirect_edge_callee (struct cgraph_edge *e, struct cgraph_node *n)
372
{
373
  /* Remove from callers list of the current callee.  */
374
  cgraph_edge_remove_callee (e);
375
 
376
  /* Insert to callers list of the new callee.  */
377
  e->prev_caller = NULL;
378
  if (n->callers)
379
    n->callers->prev_caller = e;
380
  e->next_caller = n->callers;
381
  n->callers = e;
382
  e->callee = n;
383
}
384
 
385
/* Remove all callees from the node.  */
386
 
387
void
388
cgraph_node_remove_callees (struct cgraph_node *node)
389
{
390
  struct cgraph_edge *e;
391
 
392
  /* It is sufficient to remove the edges from the lists of callers of
393
     the callees.  The callee list of the node can be zapped with one
394
     assignment.  */
395
  for (e = node->callees; e; e = e->next_callee)
396
    cgraph_edge_remove_callee (e);
397
  node->callees = NULL;
398
}
399
 
400
/* Remove all callers from the node.  */
401
 
402
static void
403
cgraph_node_remove_callers (struct cgraph_node *node)
404
{
405
  struct cgraph_edge *e;
406
 
407
  /* It is sufficient to remove the edges from the lists of callees of
408
     the callers.  The caller list of the node can be zapped with one
409
     assignment.  */
410
  for (e = node->callers; e; e = e->next_caller)
411
    cgraph_edge_remove_caller (e);
412
  node->callers = NULL;
413
}
414
 
415
/* Remove the node from cgraph.  */
416
 
417
void
418
cgraph_remove_node (struct cgraph_node *node)
419
{
420
  void **slot;
421
  bool kill_body = false;
422
 
423
  cgraph_node_remove_callers (node);
424
  cgraph_node_remove_callees (node);
425
  while (node->nested)
426
    cgraph_remove_node (node->nested);
427
  if (node->origin)
428
    {
429
      struct cgraph_node **node2 = &node->origin->nested;
430
 
431
      while (*node2 != node)
432
        node2 = &(*node2)->next_nested;
433
      *node2 = node->next_nested;
434
    }
435
  if (node->previous)
436
    node->previous->next = node->next;
437
  else
438
    cgraph_nodes = node->next;
439
  if (node->next)
440
    node->next->previous = node->previous;
441
  slot = htab_find_slot (cgraph_hash, node, NO_INSERT);
442
  if (*slot == node)
443
    {
444
      if (node->next_clone)
445
      {
446
        struct cgraph_node *new_node = node->next_clone;
447
        struct cgraph_node *n;
448
 
449
        /* Make the next clone be the master clone */
450
        for (n = new_node; n; n = n->next_clone)
451
          n->master_clone = new_node;
452
 
453
        *slot = new_node;
454
        node->next_clone->prev_clone = NULL;
455
      }
456
      else
457
        {
458
          htab_clear_slot (cgraph_hash, slot);
459
          kill_body = true;
460
        }
461
    }
462
  else
463
    {
464
      node->prev_clone->next_clone = node->next_clone;
465
      if (node->next_clone)
466
        node->next_clone->prev_clone = node->prev_clone;
467
    }
468
 
469
  /* While all the clones are removed after being proceeded, the function
470
     itself is kept in the cgraph even after it is compiled.  Check whether
471
     we are done with this body and reclaim it proactively if this is the case.
472
     */
473
  if (!kill_body && *slot)
474
    {
475
      struct cgraph_node *n = *slot;
476
      if (!n->next_clone && !n->global.inlined_to
477
          && (cgraph_global_info_ready
478
              && (TREE_ASM_WRITTEN (n->decl) || DECL_EXTERNAL (n->decl))))
479
        kill_body = true;
480
    }
481
 
482
  if (kill_body && !dump_enabled_p (TDI_tree_all) && flag_unit_at_a_time)
483
    {
484
      DECL_SAVED_TREE (node->decl) = NULL;
485
      DECL_STRUCT_FUNCTION (node->decl) = NULL;
486
      DECL_INITIAL (node->decl) = error_mark_node;
487
    }
488
  cgraph_n_nodes--;
489
  /* Do not free the structure itself so the walk over chain can continue.  */
490
}
491
 
492
/* Notify finalize_compilation_unit that given node is reachable.  */
493
 
494
void
495
cgraph_mark_reachable_node (struct cgraph_node *node)
496
{
497
  if (!node->reachable && node->local.finalized)
498
    {
499
      notice_global_symbol (node->decl);
500
      node->reachable = 1;
501
      gcc_assert (!cgraph_global_info_ready);
502
 
503
      node->next_needed = cgraph_nodes_queue;
504
      cgraph_nodes_queue = node;
505
    }
506
}
507
 
508
/* Likewise indicate that a node is needed, i.e. reachable via some
509
   external means.  */
510
 
511
void
512
cgraph_mark_needed_node (struct cgraph_node *node)
513
{
514
  node->needed = 1;
515
  cgraph_mark_reachable_node (node);
516
}
517
 
518
/* Return local info for the compiled function.  */
519
 
520
struct cgraph_local_info *
521
cgraph_local_info (tree decl)
522
{
523
  struct cgraph_node *node;
524
 
525
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
526
  node = cgraph_node (decl);
527
  return &node->local;
528
}
529
 
530
/* Return local info for the compiled function.  */
531
 
532
struct cgraph_global_info *
533
cgraph_global_info (tree decl)
534
{
535
  struct cgraph_node *node;
536
 
537
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL && cgraph_global_info_ready);
538
  node = cgraph_node (decl);
539
  return &node->global;
540
}
541
 
542
/* Return local info for the compiled function.  */
543
 
544
struct cgraph_rtl_info *
545
cgraph_rtl_info (tree decl)
546
{
547
  struct cgraph_node *node;
548
 
549
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
550
  node = cgraph_node (decl);
551
  if (decl != current_function_decl
552
      && !TREE_ASM_WRITTEN (node->decl))
553
    return NULL;
554
  return &node->rtl;
555
}
556
 
557
/* Return name of the node used in debug output.  */
558
const char *
559
cgraph_node_name (struct cgraph_node *node)
560
{
561
  return lang_hooks.decl_printable_name (node->decl, 2);
562
}
563
 
564
/* Return name of the node used in debug output.  */
565
static const char *
566
cgraph_varpool_node_name (struct cgraph_varpool_node *node)
567
{
568
  return lang_hooks.decl_printable_name (node->decl, 2);
569
}
570
 
571
/* Printable names for the availability enum, indexed by its values.  */
static const char * const availability_names[] =
  {"unset", "not_available", "overwrittable", "available", "local"};
574
 
575
/* Dump given cgraph node.  */
576
void
577
dump_cgraph_node (FILE *f, struct cgraph_node *node)
578
{
579
  struct cgraph_edge *edge;
580
  fprintf (f, "%s/%i:", cgraph_node_name (node), node->uid);
581
  if (node->global.inlined_to)
582
    fprintf (f, " (inline copy in %s/%i)",
583
             cgraph_node_name (node->global.inlined_to),
584
             node->global.inlined_to->uid);
585
  if (cgraph_function_flags_ready)
586
    fprintf (f, " availability:%s",
587
             availability_names [cgraph_function_body_availability (node)]);
588
  if (node->master_clone && node->master_clone->uid != node->uid)
589
    fprintf (f, "(%i)", node->master_clone->uid);
590
  if (node->count)
591
    fprintf (f, " executed "HOST_WIDEST_INT_PRINT_DEC"x",
592
             (HOST_WIDEST_INT)node->count);
593
  if (node->local.self_insns)
594
    fprintf (f, " %i insns", node->local.self_insns);
595
  if (node->global.insns && node->global.insns != node->local.self_insns)
596
    fprintf (f, " (%i after inlining)", node->global.insns);
597
  if (node->origin)
598
    fprintf (f, " nested in: %s", cgraph_node_name (node->origin));
599
  if (node->needed)
600
    fprintf (f, " needed");
601
  else if (node->reachable)
602
    fprintf (f, " reachable");
603
  if (DECL_SAVED_TREE (node->decl))
604
    fprintf (f, " tree");
605
  if (node->output)
606
    fprintf (f, " output");
607
  if (node->local.local)
608
    fprintf (f, " local");
609
  if (node->local.externally_visible)
610
    fprintf (f, " externally_visible");
611
  if (node->local.finalized)
612
    fprintf (f, " finalized");
613
  if (node->local.disregard_inline_limits)
614
    fprintf (f, " always_inline");
615
  else if (node->local.inlinable)
616
    fprintf (f, " inlinable");
617
  if (node->local.redefined_extern_inline)
618
    fprintf (f, " redefined_extern_inline");
619
  if (TREE_ASM_WRITTEN (node->decl))
620
    fprintf (f, " asm_written");
621
 
622
  fprintf (f, "\n  called by: ");
623
  for (edge = node->callers; edge; edge = edge->next_caller)
624
    {
625
      fprintf (f, "%s/%i ", cgraph_node_name (edge->caller),
626
               edge->caller->uid);
627
      if (edge->count)
628
        fprintf (f, "("HOST_WIDEST_INT_PRINT_DEC"x) ",
629
                 (HOST_WIDEST_INT)edge->count);
630
      if (!edge->inline_failed)
631
        fprintf(f, "(inlined) ");
632
    }
633
 
634
  fprintf (f, "\n  calls: ");
635
  for (edge = node->callees; edge; edge = edge->next_callee)
636
    {
637
      fprintf (f, "%s/%i ", cgraph_node_name (edge->callee),
638
               edge->callee->uid);
639
      if (!edge->inline_failed)
640
        fprintf(f, "(inlined) ");
641
      if (edge->count)
642
        fprintf (f, "("HOST_WIDEST_INT_PRINT_DEC"x) ",
643
                 (HOST_WIDEST_INT)edge->count);
644
      if (edge->loop_nest)
645
        fprintf (f, "(nested in %i loops) ", edge->loop_nest);
646
    }
647
  fprintf (f, "\n");
648
}
649
 
650
/* Dump the callgraph.  */
651
 
652
void
653
dump_cgraph (FILE *f)
654
{
655
  struct cgraph_node *node;
656
 
657
  fprintf (f, "callgraph:\n\n");
658
  for (node = cgraph_nodes; node; node = node->next)
659
    dump_cgraph_node (f, node);
660
}
661
 
662
/* Dump given cgraph node.  */
663
void
664
dump_cgraph_varpool_node (FILE *f, struct cgraph_varpool_node *node)
665
{
666
  fprintf (f, "%s:", cgraph_varpool_node_name (node));
667
  fprintf (f, " availability:%s", availability_names [cgraph_variable_initializer_availability (node)]);
668
  if (DECL_INITIAL (node->decl))
669
    fprintf (f, " initialized");
670
  if (node->needed)
671
    fprintf (f, " needed");
672
  if (node->analyzed)
673
    fprintf (f, " analyzed");
674
  if (node->finalized)
675
    fprintf (f, " finalized");
676
  if (node->output)
677
    fprintf (f, " output");
678
  if (node->externally_visible)
679
    fprintf (f, " externally_visible");
680
  fprintf (f, "\n");
681
}
682
 
683
/* Dump the callgraph.  */
684
 
685
void
686
dump_varpool (FILE *f)
687
{
688
  struct cgraph_varpool_node *node;
689
 
690
  fprintf (f, "variable pool:\n\n");
691
  for (node = cgraph_varpool_nodes; node; node = node->next_needed)
692
    dump_cgraph_varpool_node (f, node);
693
}
694
 
695
/* Returns a hash code for P.  */
696
 
697
static hashval_t
698
hash_varpool_node (const void *p)
699
{
700
  const struct cgraph_varpool_node *n = p;
701
  return (hashval_t) DECL_UID (n->decl);
702
}
703
 
704
/* Returns nonzero if P1 and P2 are equal.  */
705
 
706
static int
707
eq_varpool_node (const void *p1, const void *p2)
708
{
709
  const struct cgraph_varpool_node *n1 = p1, *n2 = p2;
710
  return DECL_UID (n1->decl) == DECL_UID (n2->decl);
711
}
712
 
713
/* Return cgraph_varpool node assigned to DECL.  Create new one when needed.  */
714
struct cgraph_varpool_node *
715
cgraph_varpool_node (tree decl)
716
{
717
  struct cgraph_varpool_node key, *node, **slot;
718
 
719
  gcc_assert (DECL_P (decl) && TREE_CODE (decl) != FUNCTION_DECL);
720
 
721
  if (!cgraph_varpool_hash)
722
    cgraph_varpool_hash = htab_create_ggc (10, hash_varpool_node,
723
                                           eq_varpool_node, NULL);
724
  key.decl = decl;
725
  slot = (struct cgraph_varpool_node **)
726
    htab_find_slot (cgraph_varpool_hash, &key, INSERT);
727
  if (*slot)
728
    return *slot;
729
  node = ggc_alloc_cleared (sizeof (*node));
730
  node->decl = decl;
731
  node->next = cgraph_varpool_nodes;
732
  cgraph_varpool_nodes = node;
733
  *slot = node;
734
  return node;
735
}
736
 
737
struct cgraph_varpool_node *
738
cgraph_varpool_node_for_asm (tree asmname)
739
{
740
  struct cgraph_varpool_node *node;
741
 
742
  for (node = cgraph_varpool_nodes; node ; node = node->next)
743
    if (decl_assembler_name_equal (node->decl, asmname))
744
      return node;
745
 
746
  return NULL;
747
}
748
 
749
/* Set the DECL_ASSEMBLER_NAME and update cgraph hashtables.  */
750
void
751
change_decl_assembler_name (tree decl, tree name)
752
{
753
  if (!DECL_ASSEMBLER_NAME_SET_P (decl))
754
    {
755
      SET_DECL_ASSEMBLER_NAME (decl, name);
756
      return;
757
    }
758
  if (name == DECL_ASSEMBLER_NAME (decl))
759
    return;
760
 
761
  if (TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl))
762
      && DECL_RTL_SET_P (decl))
763
    warning (0, "%D renamed after being referenced in assembly", decl);
764
 
765
  SET_DECL_ASSEMBLER_NAME (decl, name);
766
}
767
 
768
/* Helper function for finalization code - add node into lists so it will
769
   be analyzed and compiled.  */
770
void
771
cgraph_varpool_enqueue_needed_node (struct cgraph_varpool_node *node)
772
{
773
  if (cgraph_varpool_last_needed_node)
774
    cgraph_varpool_last_needed_node->next_needed = node;
775
  cgraph_varpool_last_needed_node = node;
776
  node->next_needed = NULL;
777
  if (!cgraph_varpool_nodes_queue)
778
    cgraph_varpool_nodes_queue = node;
779
  if (!cgraph_varpool_first_unanalyzed_node)
780
    cgraph_varpool_first_unanalyzed_node = node;
781
  notice_global_symbol (node->decl);
782
}
783
 
784
/* Reset the queue of needed nodes.  */
785
void
786
cgraph_varpool_reset_queue (void)
787
{
788
  cgraph_varpool_last_needed_node = NULL;
789
  cgraph_varpool_nodes_queue = NULL;
790
  cgraph_varpool_first_unanalyzed_node = NULL;
791
}
792
 
793
/* Notify finalize_compilation_unit that given node is reachable
794
   or needed.  */
795
void
796
cgraph_varpool_mark_needed_node (struct cgraph_varpool_node *node)
797
{
798
  if (!node->needed && node->finalized)
799
    cgraph_varpool_enqueue_needed_node (node);
800
  node->needed = 1;
801
}
802
 
803
/* Determine if variable DECL is needed.  That is, visible to something
804
   either outside this translation unit, something magic in the system
805
   configury, or (if not doing unit-at-a-time) to something we haven't
806
   seen yet.  */
807
 
808
bool
809
decide_is_variable_needed (struct cgraph_varpool_node *node, tree decl)
810
{
811
  /* If the user told us it is used, then it must be so.  */
812
  if (node->externally_visible
813
      || lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
814
    return true;
815
 
816
  /* ??? If the assembler name is set by hand, it is possible to assemble
817
     the name later after finalizing the function and the fact is noticed
818
     in assemble_name then.  This is arguably a bug.  */
819
  if (DECL_ASSEMBLER_NAME_SET_P (decl)
820
      && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
821
    return true;
822
 
823
  /* If we decided it was needed before, but at the time we didn't have
824
     the definition available, then it's still needed.  */
825
  if (node->needed)
826
    return true;
827
 
828
  /* Externally visible variables must be output.  The exception is
829
     COMDAT variables that must be output only when they are needed.  */
830
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
831
    return true;
832
 
833
  if (flag_unit_at_a_time)
834
    return false;
835
 
836
  /* If not doing unit at a time, then we'll only defer this function
837
     if its marked for inlining.  Otherwise we want to emit it now.  */
838
 
839
  /* We want to emit COMDAT variables only when absolutely necessary.  */
840
  if (DECL_COMDAT (decl))
841
    return false;
842
  return true;
843
}
844
 
845
void
846
cgraph_varpool_finalize_decl (tree decl)
847
{
848
  struct cgraph_varpool_node *node = cgraph_varpool_node (decl);
849
 
850
  /* The first declaration of a variable that comes through this function
851
     decides whether it is global (in C, has external linkage)
852
     or local (in C, has internal linkage).  So do nothing more
853
     if this function has already run.  */
854
  if (node->finalized)
855
    {
856
      if (cgraph_global_info_ready || !flag_unit_at_a_time)
857
        cgraph_varpool_assemble_pending_decls ();
858
      return;
859
    }
860
  if (node->needed)
861
    cgraph_varpool_enqueue_needed_node (node);
862
  node->finalized = true;
863
 
864
  if (decide_is_variable_needed (node, decl))
865
    cgraph_varpool_mark_needed_node (node);
866
  /* Since we reclaim unreachable nodes at the end of every language
867
     level unit, we need to be conservative about possible entry points
868
     there.  */
869
  else if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
870
    cgraph_varpool_mark_needed_node (node);
871
  if (cgraph_global_info_ready || !flag_unit_at_a_time)
872
    cgraph_varpool_assemble_pending_decls ();
873
}
874
 
875
/* Return true when the DECL can possibly be inlined.  */
876
bool
877
cgraph_function_possibly_inlined_p (tree decl)
878
{
879
  if (!cgraph_global_info_ready)
880
    return (DECL_INLINE (decl) && !flag_really_no_inline);
881
  return DECL_POSSIBLY_INLINED (decl);
882
}
883
 
884
/* Create clone of E in the node N represented by CALL_EXPR the callgraph.  */
885
struct cgraph_edge *
886
cgraph_clone_edge (struct cgraph_edge *e, struct cgraph_node *n,
887
                   tree call_stmt, gcov_type count_scale, int loop_nest,
888
                   bool update_original)
889
{
890
  struct cgraph_edge *new;
891
 
892
  new = cgraph_create_edge (n, e->callee, call_stmt,
893
                            e->count * count_scale / REG_BR_PROB_BASE,
894
                            e->loop_nest + loop_nest);
895
 
896
  new->inline_failed = e->inline_failed;
897
  if (update_original)
898
    {
899
      e->count -= new->count;
900
      if (e->count < 0)
901
        e->count = 0;
902
    }
903
  return new;
904
}
905
 
906
/* Create node representing clone of N executed COUNT times.  Decrease
907
   the execution counts from original node too.
908
 
909
   When UPDATE_ORIGINAL is true, the counts are subtracted from the original
910
   function's profile to reflect the fact that part of execution is handled
911
   by node.  */
912
struct cgraph_node *
913
cgraph_clone_node (struct cgraph_node *n, gcov_type count, int loop_nest,
914
                   bool update_original)
915
{
916
  struct cgraph_node *new = cgraph_create_node ();
917
  struct cgraph_edge *e;
918
  gcov_type count_scale;
919
 
920
  new->decl = n->decl;
921
  new->origin = n->origin;
922
  if (new->origin)
923
    {
924
      new->next_nested = new->origin->nested;
925
      new->origin->nested = new;
926
    }
927
  new->analyzed = n->analyzed;
928
  new->local = n->local;
929
  new->global = n->global;
930
  new->rtl = n->rtl;
931
  new->master_clone = n->master_clone;
932
  new->count = count;
933
  if (n->count)
934
    count_scale = new->count * REG_BR_PROB_BASE / n->count;
935
  else
936
    count_scale = 0;
937
  if (update_original)
938
    {
939
      n->count -= count;
940
      if (n->count < 0)
941
        n->count = 0;
942
    }
943
 
944
  for (e = n->callees;e; e=e->next_callee)
945
    cgraph_clone_edge (e, new, e->call_stmt, count_scale, loop_nest,
946
                       update_original);
947
 
948
  new->next_clone = n->next_clone;
949
  new->prev_clone = n;
950
  n->next_clone = new;
951
  if (new->next_clone)
952
    new->next_clone->prev_clone = new;
953
 
954
  return new;
955
}
956
 
957
/* Return true if N is an master_clone, (see cgraph_master_clone).  */
958
 
959
bool
960
cgraph_is_master_clone (struct cgraph_node *n)
961
{
962
  return (n == cgraph_master_clone (n));
963
}
964
 
965
struct cgraph_node *
966
cgraph_master_clone (struct cgraph_node *n)
967
{
968
  enum availability avail = cgraph_function_body_availability (n);
969
 
970
  if (avail == AVAIL_NOT_AVAILABLE || avail == AVAIL_OVERWRITABLE)
971
    return NULL;
972
 
973
  if (!n->master_clone)
974
    n->master_clone = cgraph_node (n->decl);
975
 
976
  return n->master_clone;
977
}
978
 
979
/* NODE is no longer nested function; update cgraph accordingly.  */
980
void
981
cgraph_unnest_node (struct cgraph_node *node)
982
{
983
  struct cgraph_node **node2 = &node->origin->nested;
984
  gcc_assert (node->origin);
985
 
986
  while (*node2 != node)
987
    node2 = &(*node2)->next_nested;
988
  *node2 = node->next_nested;
989
  node->origin = NULL;
990
}
991
 
992
/* Return function availability.  See cgraph.h for description of individual
993
   return values.  */
994
enum availability
995
cgraph_function_body_availability (struct cgraph_node *node)
996
{
997
  enum availability avail;
998
  gcc_assert (cgraph_function_flags_ready);
999
  if (!node->analyzed)
1000
    avail = AVAIL_NOT_AVAILABLE;
1001
  else if (node->local.local)
1002
    avail = AVAIL_LOCAL;
1003
  else if (node->local.externally_visible)
1004
    avail = AVAIL_AVAILABLE;
1005
 
1006
  /* If the function can be overwritten, return OVERWRITABLE.  Take
1007
     care at least of two notable extensions - the COMDAT functions
1008
     used to share template instantiations in C++ (this is symmetric
1009
     to code cp_cannot_inline_tree_fn and probably shall be shared and
1010
     the inlinability hooks completely eliminated).
1011
 
1012
     ??? Does the C++ one definition rule allow us to always return
1013
     AVAIL_AVAILABLE here?  That would be good reason to preserve this
1014
     hook Similarly deal with extern inline functions - this is again
1015
     necessary to get C++ shared functions having keyed templates
1016
     right and in the C extension documentation we probably should
1017
     document the requirement of both versions of function (extern
1018
     inline and offline) having same side effect characteristics as
1019
     good optimization is what this optimization is about.  */
1020
 
1021
  else if (!(*targetm.binds_local_p) (node->decl)
1022
           && !DECL_COMDAT (node->decl) && !DECL_EXTERNAL (node->decl))
1023
    avail = AVAIL_OVERWRITABLE;
1024
  else avail = AVAIL_AVAILABLE;
1025
 
1026
  return avail;
1027
}
1028
 
1029
/* Return variable availability.  See cgraph.h for description of individual
1030
   return values.  */
1031
enum availability
1032
cgraph_variable_initializer_availability (struct cgraph_varpool_node *node)
1033
{
1034
  gcc_assert (cgraph_function_flags_ready);
1035
  if (!node->finalized)
1036
    return AVAIL_NOT_AVAILABLE;
1037
  if (!TREE_PUBLIC (node->decl))
1038
    return AVAIL_AVAILABLE;
1039
  /* If the variable can be overwritten, return OVERWRITABLE.  Takes
1040
     care of at least two notable extensions - the COMDAT variables
1041
     used to share template instantiations in C++.  */
1042
  if (!(*targetm.binds_local_p) (node->decl) && !DECL_COMDAT (node->decl))
1043
    return AVAIL_OVERWRITABLE;
1044
  return AVAIL_AVAILABLE;
1045
}
1046
 
1047
#include "gt-cgraph.h"

powered by: WebSVN 2.1.0

© copyright 1999-2024 OpenCores.org, equivalent to Oliscience, all rights reserved. OpenCores®, registered trademark.