OpenCores
URL https://opencores.org/ocsvn/openrisc_2011-10-31/openrisc_2011-10-31/trunk

Subversion Repositories openrisc_2011-10-31

[/] [openrisc/] [trunk/] [gnu-src/] [gcc-4.2.2/] [gcc/] [cgraph.c] - Blame information for rev 369

Go to most recent revision | Details | Compare with Previous | View Log

Line No. Rev Author Line
1 38 julius
/* Callgraph handling code.
2
   Copyright (C) 2003, 2004, 2005, 2007 Free Software Foundation, Inc.
3
   Contributed by Jan Hubicka
4
 
5
This file is part of GCC.
6
 
7
GCC is free software; you can redistribute it and/or modify it under
8
the terms of the GNU General Public License as published by the Free
9
Software Foundation; either version 3, or (at your option) any later
10
version.
11
 
12
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13
WARRANTY; without even the implied warranty of MERCHANTABILITY or
14
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15
for more details.
16
 
17
You should have received a copy of the GNU General Public License
18
along with GCC; see the file COPYING3.  If not see
19
<http://www.gnu.org/licenses/>.  */
20
 
21
/*  This file contains basic routines manipulating call graph and variable pool
22
 
23
The callgraph:
24
 
25
    The call-graph is data structure designed for intra-procedural optimization
26
    but it is also used in non-unit-at-a-time compilation to allow easier code
27
    sharing.
28
 
29
    The call-graph consist of nodes and edges represented via linked lists.
30
    Each function (external or not) corresponds to the unique node (in
31
    contrast to tree DECL nodes where we can have multiple nodes for each
32
    function).
33
 
34
    The mapping from declarations to call-graph nodes is done using hash table
35
    based on DECL_ASSEMBLER_NAME, so it is essential for assembler name to
36
    not change once the declaration is inserted into the call-graph.
37
    The call-graph nodes are created lazily using cgraph_node function when
38
    called for unknown declaration.
39
 
40
    When built, there is one edge for each direct call.  It is possible that
41
    the reference will be later optimized out.  The call-graph is built
42
    conservatively in order to make conservative data flow analysis possible.
43
 
44
    The callgraph at the moment does not represent indirect calls or calls
45
    from other compilation unit.  Flag NEEDED is set for each node that may
46
    be accessed in such an invisible way and it shall be considered an
47
    entry point to the callgraph.
48
 
49
    Interprocedural information:
50
 
51
      Callgraph is place to store data needed for interprocedural optimization.
52
      All data structures are divided into three components: local_info that
53
      is produced while analyzing the function, global_info that is result
54
      of global walking of the callgraph on the end of compilation and
55
      rtl_info used by RTL backend to propagate data from already compiled
56
      functions to their callers.
57
 
58
    Inlining plans:
59
 
60
      The function inlining information is decided in advance and maintained
61
      in the callgraph as so called inline plan.
62
      For each inlined call, the callee's node is cloned to represent the
63
      new function copy produced by inliner.
64
      Each inlined call gets a unique corresponding clone node of the callee
65
      and the data structure is updated while inlining is performed, so
66
      the clones are eliminated and their callee edges redirected to the
67
      caller.
68
 
69
      Each edge has "inline_failed" field.  When the field is set to NULL,
70
      the call will be inlined.  When it is non-NULL it contains a reason
71
      why inlining wasn't performed.
72
 
73
 
74
The varpool data structure:
75
 
76
    Varpool is used to maintain variables in similar manner as call-graph
77
    is used for functions.  Most of the API is symmetric replacing cgraph
78
    function prefix by cgraph_varpool  */
79
 
80
 
81
#include "config.h"
82
#include "system.h"
83
#include "coretypes.h"
84
#include "tm.h"
85
#include "tree.h"
86
#include "tree-inline.h"
87
#include "langhooks.h"
88
#include "hashtab.h"
89
#include "toplev.h"
90
#include "flags.h"
91
#include "ggc.h"
92
#include "debug.h"
93
#include "target.h"
94
#include "basic-block.h"
95
#include "cgraph.h"
96
#include "varray.h"
97
#include "output.h"
98
#include "intl.h"
99
#include "tree-gimple.h"
100
#include "tree-dump.h"
101
 
102
static void cgraph_node_remove_callers (struct cgraph_node *node);
static inline void cgraph_edge_remove_caller (struct cgraph_edge *e);
static inline void cgraph_edge_remove_callee (struct cgraph_edge *e);

/* Hash table used to convert function declarations into cgraph nodes,
   keyed on DECL_UID (see hash_node/eq_node below).  */
static GTY((param_is (struct cgraph_node))) htab_t cgraph_hash;

/* The linked list of cgraph nodes.  */
struct cgraph_node *cgraph_nodes;

/* Queue of cgraph nodes scheduled to be lowered.  */
struct cgraph_node *cgraph_nodes_queue;

/* Queue of cgraph nodes scheduled to be expanded.  This is a
   secondary queue used during optimization to accommodate passes that
   may generate new functions that need to be optimized and expanded.  */
struct cgraph_node *cgraph_expand_queue;

/* Number of nodes in existence.  */
int cgraph_n_nodes;

/* Maximal uid used in cgraph nodes.  */
int cgraph_max_uid;

/* Set when whole unit has been analyzed so we can access global info.  */
bool cgraph_global_info_ready = false;

/* Set when the cgraph is fully built and the basic flags are computed.  */
bool cgraph_function_flags_ready = false;

/* Hash table used to convert variable declarations into varpool nodes,
   keyed on DECL_UID (see hash_varpool_node/eq_varpool_node below).  */
static GTY((param_is (struct cgraph_varpool_node))) htab_t cgraph_varpool_hash;

/* Queue of varpool nodes scheduled to be lowered and output.  */
struct cgraph_varpool_node *cgraph_varpool_nodes_queue, *cgraph_varpool_first_unanalyzed_node;

/* The linked list of cgraph varpool nodes.  */
struct cgraph_varpool_node *cgraph_varpool_nodes;

/* End of the varpool queue.  */
struct cgraph_varpool_node *cgraph_varpool_last_needed_node;

/* Linked list of cgraph asm nodes.  */
struct cgraph_asm_node *cgraph_asm_nodes;

/* Last node in cgraph_asm_nodes.  */
static GTY(()) struct cgraph_asm_node *cgraph_asm_last_node;

/* The order index of the next cgraph node to be created.  This is
   used so that we can sort the cgraph nodes in order by when we saw
   them, to support -fno-toplevel-reorder.  */
int cgraph_order;

static hashval_t hash_node (const void *);
static int eq_node (const void *, const void *);
157
 
158
/* Returns a hash code for P.  */
159
 
160
static hashval_t
161
hash_node (const void *p)
162
{
163
  const struct cgraph_node *n = (const struct cgraph_node *) p;
164
  return (hashval_t) DECL_UID (n->decl);
165
}
166
 
167
/* Returns nonzero if P1 and P2 are equal.  */
168
 
169
static int
170
eq_node (const void *p1, const void *p2)
171
{
172
  const struct cgraph_node *n1 = (const struct cgraph_node *) p1;
173
  const struct cgraph_node *n2 = (const struct cgraph_node *) p2;
174
  return DECL_UID (n1->decl) == DECL_UID (n2->decl);
175
}
176
 
177
/* Allocate new callgraph node and insert it into basic data structures.  */
178
static struct cgraph_node *
179
cgraph_create_node (void)
180
{
181
  struct cgraph_node *node;
182
 
183
  node = GGC_CNEW (struct cgraph_node);
184
  node->next = cgraph_nodes;
185
  node->uid = cgraph_max_uid++;
186
  node->order = cgraph_order++;
187
  if (cgraph_nodes)
188
    cgraph_nodes->previous = node;
189
  node->previous = NULL;
190
  node->global.estimated_growth = INT_MIN;
191
  cgraph_nodes = node;
192
  cgraph_n_nodes++;
193
  return node;
194
}
195
 
196
/* Return the cgraph node assigned to DECL, creating a new one when needed.
   Lazily creates the DECL_UID-keyed hash table on first use.  When the
   node is created for a nested function, it is also linked into its
   containing function's list of nested nodes.  */
struct cgraph_node *
cgraph_node (tree decl)
{
  struct cgraph_node key, *node, **slot;

  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);

  if (!cgraph_hash)
    cgraph_hash = htab_create_ggc (10, hash_node, eq_node, NULL);

  /* Stack-allocated key node; only its DECL matters for the lookup.  */
  key.decl = decl;

  slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key, INSERT);

  if (*slot)
    {
      node = *slot;
      /* A pre-existing node may predate the clone machinery; make sure
         it is its own master clone.  */
      if (!node->master_clone)
        node->master_clone = node;
      return node;
    }

  node = cgraph_create_node ();
  node->decl = decl;
  *slot = node;
  /* Nested function: record the origin and chain this node onto the
     origin's nested list.  */
  if (DECL_CONTEXT (decl) && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)
    {
      node->origin = cgraph_node (DECL_CONTEXT (decl));
      node->next_nested = node->origin->nested;
      node->origin->nested = node;
      node->master_clone = node;
    }
  return node;
}
231
 
232
/* Insert already constructed node into hashtable.  */
233
 
234
void
235
cgraph_insert_node_to_hashtable (struct cgraph_node *node)
236
{
237
  struct cgraph_node **slot;
238
 
239
  slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, node, INSERT);
240
 
241
  gcc_assert (!*slot);
242
  *slot = node;
243
}
244
 
245
/* Compare ASMNAME with the DECL_ASSEMBLER_NAME of DECL.  */
246
 
247
static bool
248
decl_assembler_name_equal (tree decl, tree asmname)
249
{
250
  tree decl_asmname = DECL_ASSEMBLER_NAME (decl);
251
 
252
  if (decl_asmname == asmname)
253
    return true;
254
 
255
  /* If the target assembler name was set by the user, things are trickier.
256
     We have a leading '*' to begin with.  After that, it's arguable what
257
     is the correct thing to do with -fleading-underscore.  Arguably, we've
258
     historically been doing the wrong thing in assemble_alias by always
259
     printing the leading underscore.  Since we're not changing that, make
260
     sure user_label_prefix follows the '*' before matching.  */
261
  if (IDENTIFIER_POINTER (decl_asmname)[0] == '*')
262
    {
263
      const char *decl_str = IDENTIFIER_POINTER (decl_asmname) + 1;
264
      size_t ulp_len = strlen (user_label_prefix);
265
 
266
      if (ulp_len == 0)
267
        ;
268
      else if (strncmp (decl_str, user_label_prefix, ulp_len) == 0)
269
        decl_str += ulp_len;
270
      else
271
        return false;
272
 
273
      return strcmp (decl_str, IDENTIFIER_POINTER (asmname)) == 0;
274
    }
275
 
276
  return false;
277
}
278
 
279
 
280
/* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME.
281
   Return NULL if there's no such node.  */
282
 
283
struct cgraph_node *
284
cgraph_node_for_asm (tree asmname)
285
{
286
  struct cgraph_node *node;
287
 
288
  for (node = cgraph_nodes; node ; node = node->next)
289
    if (decl_assembler_name_equal (node->decl, asmname))
290
      return node;
291
 
292
  return NULL;
293
}
294
 
295
/* Returns a hash value for X (which really is a struct cgraph_edge),
   hashing on the pointer identity of its call statement.  */

static hashval_t
edge_hash (const void *x)
{
  return htab_hash_pointer (((struct cgraph_edge *) x)->call_stmt);
}
302
 
303
/* Return nonzero if the call statement of edge X (a struct cgraph_edge)
   is the statement Y.  */

static int
edge_eq (const void *x, const void *y)
{
  return ((struct cgraph_edge *) x)->call_stmt == y;
}
310
 
311
/* Return the callgraph edge of NODE representing the CALL_EXPR statement
   CALL_STMT, or NULL if NODE has no such edge.  Lazily builds a per-node
   call-site hash table once the callee list grows past 100 edges.  */
struct cgraph_edge *
cgraph_edge (struct cgraph_node *node, tree call_stmt)
{
  struct cgraph_edge *e, *e2;
  int n = 0;

  /* Fast path: a hash table was already built for this node.  */
  if (node->call_site_hash)
    return htab_find_with_hash (node->call_site_hash, call_stmt,
                                htab_hash_pointer (call_stmt));

  /* This loop may turn out to be performance problem.  In such case adding
     hashtables into call nodes with very many edges is probably best
     solution.  It is not good idea to add pointer into CALL_EXPR itself
     because we want to make possible having multiple cgraph nodes representing
     different clones of the same body before the body is actually cloned.  */
  for (e = node->callees; e; e = e->next_callee)
    {
      if (e->call_stmt == call_stmt)
        break;
      n++;
    }
  /* The list is long; build the hash table so subsequent lookups are O(1).
     Note we still return the result of the linear scan below.  */
  if (n > 100)
    {
      node->call_site_hash = htab_create_ggc (120, edge_hash, edge_eq, NULL);
      for (e2 = node->callees; e2; e2 = e2->next_callee)
        {
          void **slot;
          slot = htab_find_slot_with_hash (node->call_site_hash,
                                           e2->call_stmt,
                                           htab_hash_pointer (e2->call_stmt),
                                           INSERT);
          /* Each call statement must appear on at most one edge.  */
          gcc_assert (!*slot);
          *slot = e2;
        }
    }
  return e;
}
349
 
350
/* Change call_stmt of edge E to NEW_STMT, keeping the caller's
   call-site hash table (if any) in sync.  */
void
cgraph_set_call_stmt (struct cgraph_edge *e, tree new_stmt)
{
  /* Drop the hash entry keyed on the old statement.  */
  if (e->caller->call_site_hash)
    {
      htab_remove_elt_with_hash (e->caller->call_site_hash,
                                 e->call_stmt,
                                 htab_hash_pointer (e->call_stmt));
    }
  e->call_stmt = new_stmt;
  /* Re-insert under the new statement's hash.  */
  if (e->caller->call_site_hash)
    {
      void **slot;
      slot = htab_find_slot_with_hash (e->caller->call_site_hash,
                                       e->call_stmt,
                                       htab_hash_pointer
                                       (e->call_stmt), INSERT);
      /* NEW_STMT must not already be represented by another edge.  */
      gcc_assert (!*slot);
      *slot = e;
    }
}
372
 
373
/* Create an edge from CALLER to CALLEE in the cgraph for the call
   statement CALL_STMT, with profile count COUNT and loop nesting depth
   NEST.  The new edge is linked at the head of both the caller's callee
   list and the callee's caller list, and registered in the caller's
   call-site hash table when one exists.  */

struct cgraph_edge *
cgraph_create_edge (struct cgraph_node *caller, struct cgraph_node *callee,
                    tree call_stmt, gcov_type count, int nest)
{
  struct cgraph_edge *edge = GGC_NEW (struct cgraph_edge);
#ifdef ENABLE_CHECKING
  /* Expensive sanity check: no duplicate edge for the same call
     statement may exist.  */
  struct cgraph_edge *e;

  for (e = caller->callees; e; e = e->next_callee)
    gcc_assert (e->call_stmt != call_stmt);
#endif

  gcc_assert (get_call_expr_in (call_stmt));

  /* Pre-compute the reason why this call cannot (yet) be inlined; the
     inliner clears inline_failed when it decides to inline.  */
  if (!DECL_SAVED_TREE (callee->decl))
    edge->inline_failed = N_("function body not available");
  else if (callee->local.redefined_extern_inline)
    edge->inline_failed = N_("redefined extern inline functions are not "
                             "considered for inlining");
  else if (callee->local.inlinable)
    edge->inline_failed = N_("function not considered for inlining");
  else
    edge->inline_failed = N_("function not inlinable");

  edge->aux = NULL;

  edge->caller = caller;
  edge->callee = callee;
  edge->call_stmt = call_stmt;
  /* Splice into the callee's doubly linked callers list.  */
  edge->prev_caller = NULL;
  edge->next_caller = callee->callers;
  if (callee->callers)
    callee->callers->prev_caller = edge;
  /* Splice into the caller's doubly linked callees list.  */
  edge->prev_callee = NULL;
  edge->next_callee = caller->callees;
  if (caller->callees)
    caller->callees->prev_callee = edge;
  caller->callees = edge;
  callee->callers = edge;
  edge->count = count;
  edge->loop_nest = nest;
  /* Keep the caller's call-site hash (built lazily by cgraph_edge)
     up to date.  */
  if (caller->call_site_hash)
    {
      void **slot;
      slot = htab_find_slot_with_hash (caller->call_site_hash,
                                       edge->call_stmt,
                                       htab_hash_pointer
                                         (edge->call_stmt),
                                       INSERT);
      gcc_assert (!*slot);
      *slot = edge;
    }
  return edge;
}
429
 
430
/* Remove the edge E from the list of the callers of the callee.  */
431
 
432
static inline void
433
cgraph_edge_remove_callee (struct cgraph_edge *e)
434
{
435
  if (e->prev_caller)
436
    e->prev_caller->next_caller = e->next_caller;
437
  if (e->next_caller)
438
    e->next_caller->prev_caller = e->prev_caller;
439
  if (!e->prev_caller)
440
    e->callee->callers = e->next_caller;
441
}
442
 
443
/* Remove the edge E from the list of the callees of the caller.  */
444
 
445
static inline void
446
cgraph_edge_remove_caller (struct cgraph_edge *e)
447
{
448
  if (e->prev_callee)
449
    e->prev_callee->next_callee = e->next_callee;
450
  if (e->next_callee)
451
    e->next_callee->prev_callee = e->prev_callee;
452
  if (!e->prev_callee)
453
    e->caller->callees = e->next_callee;
454
  if (e->caller->call_site_hash)
455
    htab_remove_elt_with_hash (e->caller->call_site_hash,
456
                               e->call_stmt,
457
                               htab_hash_pointer (e->call_stmt));
458
}
459
 
460
/* Remove the edge E from the cgraph, detaching it from both endpoint
   lists.  The edge structure itself is garbage collected.  */

void
cgraph_remove_edge (struct cgraph_edge *e)
{
  /* Remove from callers list of the callee.  */
  cgraph_edge_remove_callee (e);

  /* Remove from callees list of the callers.  */
  cgraph_edge_remove_caller (e);
}
471
 
472
/* Redirect callee of E to N.  The function does not update underlying
473
   call expression.  */
474
 
475
void
476
cgraph_redirect_edge_callee (struct cgraph_edge *e, struct cgraph_node *n)
477
{
478
  /* Remove from callers list of the current callee.  */
479
  cgraph_edge_remove_callee (e);
480
 
481
  /* Insert to callers list of the new callee.  */
482
  e->prev_caller = NULL;
483
  if (n->callers)
484
    n->callers->prev_caller = e;
485
  e->next_caller = n->callers;
486
  n->callers = e;
487
  e->callee = n;
488
}
489
 
490
/* Remove all callees from the node.  */
491
 
492
void
493
cgraph_node_remove_callees (struct cgraph_node *node)
494
{
495
  struct cgraph_edge *e;
496
 
497
  /* It is sufficient to remove the edges from the lists of callers of
498
     the callees.  The callee list of the node can be zapped with one
499
     assignment.  */
500
  for (e = node->callees; e; e = e->next_callee)
501
    cgraph_edge_remove_callee (e);
502
  node->callees = NULL;
503
  if (node->call_site_hash)
504
    {
505
      htab_delete (node->call_site_hash);
506
      node->call_site_hash = NULL;
507
    }
508
}
509
 
510
/* Remove all callers from the node.  */
511
 
512
static void
513
cgraph_node_remove_callers (struct cgraph_node *node)
514
{
515
  struct cgraph_edge *e;
516
 
517
  /* It is sufficient to remove the edges from the lists of callees of
518
     the callers.  The caller list of the node can be zapped with one
519
     assignment.  */
520
  for (e = node->callers; e; e = e->next_caller)
521
    cgraph_edge_remove_caller (e);
522
  node->callers = NULL;
523
}
524
 
525
/* Remove NODE from the cgraph: detach all its edges, its nested
   functions, its place in the global node list and the decl hash
   table, and — when no other node still needs the function body —
   reclaim the body itself.  The node structure is NOT freed so that a
   walk over the node chain can continue past it.  */

void
cgraph_remove_node (struct cgraph_node *node)
{
  void **slot;
  bool kill_body = false;

  cgraph_node_remove_callers (node);
  cgraph_node_remove_callees (node);
  /* Incremental inlining access removed nodes stored in the postorder list.
     */
  node->needed = node->reachable = false;
  /* Recursively remove all functions nested inside this one.  */
  while (node->nested)
    cgraph_remove_node (node->nested);
  /* Unlink from the origin's singly linked nested list.  */
  if (node->origin)
    {
      struct cgraph_node **node2 = &node->origin->nested;

      while (*node2 != node)
        node2 = &(*node2)->next_nested;
      *node2 = node->next_nested;
    }
  /* Unlink from the doubly linked list of all cgraph nodes.  */
  if (node->previous)
    node->previous->next = node->next;
  else
    cgraph_nodes = node->next;
  if (node->next)
    node->next->previous = node->previous;
  node->next = NULL;
  node->previous = NULL;
  slot = htab_find_slot (cgraph_hash, node, NO_INSERT);
  if (*slot == node)
    {
      /* NODE is the master clone for its decl.  */
      if (node->next_clone)
      {
        struct cgraph_node *new_node = node->next_clone;
        struct cgraph_node *n;

        /* Make the next clone be the master clone */
        for (n = new_node; n; n = n->next_clone)
          n->master_clone = new_node;

        *slot = new_node;
        node->next_clone->prev_clone = NULL;
      }
      else
        {
          /* Last node for this decl: drop the hash entry and allow the
             body to be reclaimed below.  */
          htab_clear_slot (cgraph_hash, slot);
          kill_body = true;
        }
    }
  else
    {
      /* NODE is an interior clone: splice it out of the clone chain.  */
      node->prev_clone->next_clone = node->next_clone;
      if (node->next_clone)
        node->next_clone->prev_clone = node->prev_clone;
    }

  /* While all the clones are removed after being processed, the function
     itself is kept in the cgraph even after it is compiled.  Check whether
     we are done with this body and reclaim it proactively if this is the case.
     */
  if (!kill_body && *slot)
    {
      struct cgraph_node *n = (struct cgraph_node *) *slot;
      if (!n->next_clone && !n->global.inlined_to
          && (cgraph_global_info_ready
              && (TREE_ASM_WRITTEN (n->decl) || DECL_EXTERNAL (n->decl))))
        kill_body = true;
    }

  if (kill_body && flag_unit_at_a_time)
    {
      /* Release the function body so the GC can reclaim it.  */
      DECL_SAVED_TREE (node->decl) = NULL;
      DECL_STRUCT_FUNCTION (node->decl) = NULL;
      DECL_INITIAL (node->decl) = error_mark_node;
    }
  node->decl = NULL;
  if (node->call_site_hash)
    {
      htab_delete (node->call_site_hash);
      node->call_site_hash = NULL;
    }
  cgraph_n_nodes--;
  /* Do not free the structure itself so the walk over chain can continue.  */
}
612
 
613
/* Notify finalize_compilation_unit that given node is reachable.  */
614
 
615
void
616
cgraph_mark_reachable_node (struct cgraph_node *node)
617
{
618
  if (!node->reachable && node->local.finalized)
619
    {
620
      notice_global_symbol (node->decl);
621
      node->reachable = 1;
622
      gcc_assert (!cgraph_global_info_ready);
623
 
624
      node->next_needed = cgraph_nodes_queue;
625
      cgraph_nodes_queue = node;
626
    }
627
}
628
 
629
/* Likewise indicate that a node is needed, i.e. reachable via some
   external means (it becomes an entry point of the callgraph).  */

void
cgraph_mark_needed_node (struct cgraph_node *node)
{
  node->needed = 1;
  /* Needed implies reachable; enqueue it as such.  */
  cgraph_mark_reachable_node (node);
}
638
 
639
/* Return local info for the compiled function.  */
640
 
641
struct cgraph_local_info *
642
cgraph_local_info (tree decl)
643
{
644
  struct cgraph_node *node;
645
 
646
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
647
  node = cgraph_node (decl);
648
  return &node->local;
649
}
650
 
651
/* Return global info for the compiled function.  Only valid once the
   whole unit has been analyzed (cgraph_global_info_ready).  */

struct cgraph_global_info *
cgraph_global_info (tree decl)
{
  struct cgraph_node *node;

  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL && cgraph_global_info_ready);
  node = cgraph_node (decl);
  return &node->global;
}
662
 
663
/* Return RTL info for the compiled function, or NULL when DECL is not
   the current function and has not been compiled yet.  */

struct cgraph_rtl_info *
cgraph_rtl_info (tree decl)
{
  struct cgraph_node *node;

  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
  node = cgraph_node (decl);
  /* RTL info is only meaningful for already-compiled functions (or the
     one currently being compiled).  */
  if (decl != current_function_decl
      && !TREE_ASM_WRITTEN (node->decl))
    return NULL;
  return &node->rtl;
}
677
 
678
/* Return printable name of the cgraph node used in debug output.  */
const char *
cgraph_node_name (struct cgraph_node *node)
{
  return lang_hooks.decl_printable_name (node->decl, 2);
}
684
 
685
/* Return printable name of the varpool node used in debug output.  */
static const char *
cgraph_varpool_node_name (struct cgraph_varpool_node *node)
{
  return lang_hooks.decl_printable_name (node->decl, 2);
}
691
 
692
/* Names used to print out the availability enum; indexed by the
   cgraph availability enum values.  */
static const char * const availability_names[] =
  {"unset", "not_available", "overwrittable", "available", "local"};
695
 
696
/* Dump given cgraph node to stream F: its identity, availability,
   size/profile statistics, local flags, and its caller and callee
   edge lists.  */
void
dump_cgraph_node (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *edge;
  fprintf (f, "%s/%i:", cgraph_node_name (node), node->uid);
  if (node->global.inlined_to)
    fprintf (f, " (inline copy in %s/%i)",
             cgraph_node_name (node->global.inlined_to),
             node->global.inlined_to->uid);
  if (cgraph_function_flags_ready)
    fprintf (f, " availability:%s",
             availability_names [cgraph_function_body_availability (node)]);
  /* Show the master clone's uid when NODE is itself a clone.  */
  if (node->master_clone && node->master_clone->uid != node->uid)
    fprintf (f, "(%i)", node->master_clone->uid);
  if (node->count)
    fprintf (f, " executed "HOST_WIDEST_INT_PRINT_DEC"x",
             (HOST_WIDEST_INT)node->count);
  if (node->local.self_insns)
    fprintf (f, " %i insns", node->local.self_insns);
  if (node->global.insns && node->global.insns != node->local.self_insns)
    fprintf (f, " (%i after inlining)", node->global.insns);
  if (node->origin)
    fprintf (f, " nested in: %s", cgraph_node_name (node->origin));
  if (node->needed)
    fprintf (f, " needed");
  else if (node->reachable)
    fprintf (f, " reachable");
  if (DECL_SAVED_TREE (node->decl))
    fprintf (f, " tree");
  if (node->output)
    fprintf (f, " output");
  if (node->local.local)
    fprintf (f, " local");
  if (node->local.externally_visible)
    fprintf (f, " externally_visible");
  if (node->local.finalized)
    fprintf (f, " finalized");
  if (node->local.disregard_inline_limits)
    fprintf (f, " always_inline");
  else if (node->local.inlinable)
    fprintf (f, " inlinable");
  if (node->local.redefined_extern_inline)
    fprintf (f, " redefined_extern_inline");
  if (TREE_ASM_WRITTEN (node->decl))
    fprintf (f, " asm_written");

  fprintf (f, "\n  called by: ");
  for (edge = node->callers; edge; edge = edge->next_caller)
    {
      fprintf (f, "%s/%i ", cgraph_node_name (edge->caller),
               edge->caller->uid);
      if (edge->count)
        fprintf (f, "("HOST_WIDEST_INT_PRINT_DEC"x) ",
                 (HOST_WIDEST_INT)edge->count);
      /* NULL inline_failed means the edge has been inlined.  */
      if (!edge->inline_failed)
        fprintf(f, "(inlined) ");
    }

  fprintf (f, "\n  calls: ");
  for (edge = node->callees; edge; edge = edge->next_callee)
    {
      fprintf (f, "%s/%i ", cgraph_node_name (edge->callee),
               edge->callee->uid);
      if (!edge->inline_failed)
        fprintf(f, "(inlined) ");
      if (edge->count)
        fprintf (f, "("HOST_WIDEST_INT_PRINT_DEC"x) ",
                 (HOST_WIDEST_INT)edge->count);
      if (edge->loop_nest)
        fprintf (f, "(nested in %i loops) ", edge->loop_nest);
    }
  fprintf (f, "\n");
}
770
 
771
/* Dump the callgraph.  */
772
 
773
void
774
dump_cgraph (FILE *f)
775
{
776
  struct cgraph_node *node;
777
 
778
  fprintf (f, "callgraph:\n\n");
779
  for (node = cgraph_nodes; node; node = node->next)
780
    dump_cgraph_node (f, node);
781
}
782
 
783
/* Dump given varpool node to stream F: its name, availability and
   status flags.  */
void
dump_cgraph_varpool_node (FILE *f, struct cgraph_varpool_node *node)
{
  fprintf (f, "%s:", cgraph_varpool_node_name (node));
  /* Availability is meaningful only after the whole-unit flags have
     been computed.  */
  fprintf (f, " availability:%s",
           cgraph_function_flags_ready
           ? availability_names[cgraph_variable_initializer_availability (node)]
           : "not-ready");
  if (DECL_INITIAL (node->decl))
    fprintf (f, " initialized");
  if (node->needed)
    fprintf (f, " needed");
  if (node->analyzed)
    fprintf (f, " analyzed");
  if (node->finalized)
    fprintf (f, " finalized");
  if (node->output)
    fprintf (f, " output");
  if (node->externally_visible)
    fprintf (f, " externally_visible");
  fprintf (f, "\n");
}
806
 
807
/* Dump the variable pool to stream F.  */

void
dump_varpool (FILE *f)
{
  struct cgraph_varpool_node *node;

  fprintf (f, "variable pool:\n\n");
  for (node = cgraph_varpool_nodes; node; node = node->next_needed)
    dump_cgraph_varpool_node (f, node);
}
818
 
819
/* Returns a hash code for P.  */
820
 
821
static hashval_t
822
hash_varpool_node (const void *p)
823
{
824
  const struct cgraph_varpool_node *n = (const struct cgraph_varpool_node *) p;
825
  return (hashval_t) DECL_UID (n->decl);
826
}
827
 
828
/* Returns nonzero if P1 and P2 are equal.  */
829
 
830
static int
831
eq_varpool_node (const void *p1, const void *p2)
832
{
833
  const struct cgraph_varpool_node *n1 =
834
    (const struct cgraph_varpool_node *) p1;
835
  const struct cgraph_varpool_node *n2 =
836
    (const struct cgraph_varpool_node *) p2;
837
  return DECL_UID (n1->decl) == DECL_UID (n2->decl);
838
}
839
 
840
/* Return cgraph_varpool node assigned to DECL.  Create new one when
   needed, lazily creating the DECL_UID-keyed hash table on first use.  */
struct cgraph_varpool_node *
cgraph_varpool_node (tree decl)
{
  struct cgraph_varpool_node key, *node, **slot;

  /* Functions belong in the cgraph, not the varpool.  */
  gcc_assert (DECL_P (decl) && TREE_CODE (decl) != FUNCTION_DECL);

  if (!cgraph_varpool_hash)
    cgraph_varpool_hash = htab_create_ggc (10, hash_varpool_node,
                                           eq_varpool_node, NULL);
  /* Stack-allocated key node; only its DECL matters for the lookup.  */
  key.decl = decl;
  slot = (struct cgraph_varpool_node **)
    htab_find_slot (cgraph_varpool_hash, &key, INSERT);
  if (*slot)
    return *slot;
  node = GGC_CNEW (struct cgraph_varpool_node);
  node->decl = decl;
  node->order = cgraph_order++;
  node->next = cgraph_varpool_nodes;
  cgraph_varpool_nodes = node;
  *slot = node;
  return node;
}
864
 
865
struct cgraph_varpool_node *
866
cgraph_varpool_node_for_asm (tree asmname)
867
{
868
  struct cgraph_varpool_node *node;
869
 
870
  for (node = cgraph_varpool_nodes; node ; node = node->next)
871
    if (decl_assembler_name_equal (node->decl, asmname))
872
      return node;
873
 
874
  return NULL;
875
}
876
 
877
/* Set the DECL_ASSEMBLER_NAME and update cgraph hashtables.  */
878
void
879
change_decl_assembler_name (tree decl, tree name)
880
{
881
  if (!DECL_ASSEMBLER_NAME_SET_P (decl))
882
    {
883
      SET_DECL_ASSEMBLER_NAME (decl, name);
884
      return;
885
    }
886
  if (name == DECL_ASSEMBLER_NAME (decl))
887
    return;
888
 
889
  if (TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl))
890
      && DECL_RTL_SET_P (decl))
891
    warning (0, "%D renamed after being referenced in assembly", decl);
892
 
893
  SET_DECL_ASSEMBLER_NAME (decl, name);
894
}
895
 
896
/* Helper function for finalization code - add node into lists so it will
   be analyzed and compiled.  */
void
cgraph_varpool_enqueue_needed_node (struct cgraph_varpool_node *node)
{
  /* Append NODE at the tail of the needed-nodes queue.  */
  if (cgraph_varpool_last_needed_node)
    cgraph_varpool_last_needed_node->next_needed = node;
  cgraph_varpool_last_needed_node = node;
  node->next_needed = NULL;
  /* If the queue was empty, NODE also becomes its head ...  */
  if (!cgraph_varpool_nodes_queue)
    cgraph_varpool_nodes_queue = node;
  /* ... and the first node still awaiting analysis.  */
  if (!cgraph_varpool_first_unanalyzed_node)
    cgraph_varpool_first_unanalyzed_node = node;
  notice_global_symbol (node->decl);
}
911
 
912
/* Reset the queue of needed nodes.  */
913
void
914
cgraph_varpool_reset_queue (void)
915
{
916
  cgraph_varpool_last_needed_node = NULL;
917
  cgraph_varpool_nodes_queue = NULL;
918
  cgraph_varpool_first_unanalyzed_node = NULL;
919
}
920
 
921
/* Notify finalize_compilation_unit that given node is reachable
922
   or needed.  */
923
void
924
cgraph_varpool_mark_needed_node (struct cgraph_varpool_node *node)
925
{
926
  if (!node->needed && node->finalized
927
      && !TREE_ASM_WRITTEN (node->decl))
928
    cgraph_varpool_enqueue_needed_node (node);
929
  node->needed = 1;
930
}
931
 
932
/* Determine if variable DECL (with varpool node NODE) is needed.  That
   is, visible to something either outside this translation unit,
   something magic in the system configury, or (if not doing
   unit-at-a-time) to something we haven't seen yet.  The order of the
   checks below is significant: each early return overrides the later,
   weaker tests.  */

bool
decide_is_variable_needed (struct cgraph_varpool_node *node, tree decl)
{
  /* If the user told us it is used, then it must be so.  */
  if (node->externally_visible)
    return true;
  if (!flag_unit_at_a_time
      && lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
    return true;

  /* ??? If the assembler name is set by hand, it is possible to assemble
     the name later after finalizing the function and the fact is noticed
     in assemble_name then.  This is arguably a bug.  */
  if (DECL_ASSEMBLER_NAME_SET_P (decl)
      && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
    return true;

  /* If we decided it was needed before, but at the time we didn't have
     the definition available, then it's still needed.  */
  if (node->needed)
    return true;

  /* Externally visible variables must be output.  The exception is
     COMDAT variables that must be output only when they are needed.  */
  if (TREE_PUBLIC (decl) && !flag_whole_program && !DECL_COMDAT (decl)
      && !DECL_EXTERNAL (decl))
    return true;

  /* When not reordering top level variables, we have to assume that
     we are going to keep everything.  (With reordering enabled the
     variable can wait until something actually references it.)  */
  if (flag_unit_at_a_time && flag_toplevel_reorder)
    return false;

  /* We want to emit COMDAT variables only when absolutely necessary.  */
  if (DECL_COMDAT (decl))
    return false;
  return true;
}
975
 
976
/* Mark the variable DECL as finalized by the front end and decide
   whether it must be output.  May trigger assembling of pending
   declarations when global info is ready or when not compiling
   unit-at-a-time.  */
void
cgraph_varpool_finalize_decl (tree decl)
{
  struct cgraph_varpool_node *node = cgraph_varpool_node (decl);

  /* The first declaration of a variable that comes through this function
     decides whether it is global (in C, has external linkage)
     or local (in C, has internal linkage).  So do nothing more
     if this function has already run.  */
  if (node->finalized)
    {
      if (cgraph_global_info_ready || (!flag_unit_at_a_time && !flag_openmp))
        cgraph_varpool_assemble_pending_decls ();
      return;
    }
  /* If the node was marked needed before its definition arrived,
     queue it now that we have one.  */
  if (node->needed)
    cgraph_varpool_enqueue_needed_node (node);
  node->finalized = true;

  if (decide_is_variable_needed (node, decl))
    cgraph_varpool_mark_needed_node (node);
  /* Since we reclaim unreachable nodes at the end of every language
     level unit, we need to be conservative about possible entry points
     there.  */
  else if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
    cgraph_varpool_mark_needed_node (node);
  if (cgraph_global_info_ready || (!flag_unit_at_a_time && !flag_openmp))
    cgraph_varpool_assemble_pending_decls ();
}
1005
 
1006
/* Add a top-level asm statement to the list.  */
1007
 
1008
struct cgraph_asm_node *
1009
cgraph_add_asm_node (tree asm_str)
1010
{
1011
  struct cgraph_asm_node *node;
1012
 
1013
  node = GGC_CNEW (struct cgraph_asm_node);
1014
  node->asm_str = asm_str;
1015
  node->order = cgraph_order++;
1016
  node->next = NULL;
1017
  if (cgraph_asm_nodes == NULL)
1018
    cgraph_asm_nodes = node;
1019
  else
1020
    cgraph_asm_last_node->next = node;
1021
  cgraph_asm_last_node = node;
1022
  return node;
1023
}
1024
 
1025
/* Return true when the DECL can possibly be inlined.  */
1026
bool
1027
cgraph_function_possibly_inlined_p (tree decl)
1028
{
1029
  if (!cgraph_global_info_ready)
1030
    return (DECL_INLINE (decl) && !flag_really_no_inline);
1031
  return DECL_POSSIBLY_INLINED (decl);
1032
}
1033
 
1034
/* Create clone of E in the node N represented by CALL_EXPR the callgraph.  */
1035
struct cgraph_edge *
1036
cgraph_clone_edge (struct cgraph_edge *e, struct cgraph_node *n,
1037
                   tree call_stmt, gcov_type count_scale, int loop_nest,
1038
                   bool update_original)
1039
{
1040
  struct cgraph_edge *new;
1041
 
1042
  new = cgraph_create_edge (n, e->callee, call_stmt,
1043
                            e->count * count_scale / REG_BR_PROB_BASE,
1044
                            e->loop_nest + loop_nest);
1045
 
1046
  new->inline_failed = e->inline_failed;
1047
  if (update_original)
1048
    {
1049
      e->count -= new->count;
1050
      if (e->count < 0)
1051
        e->count = 0;
1052
    }
1053
  return new;
1054
}
1055
 
1056
/* Create node representing clone of N executed COUNT times.  Decrease
   the execution counts from original node too.

   When UPDATE_ORIGINAL is true, the counts are subtracted from the original
   function's profile to reflect the fact that part of execution is handled
   by node.  */
struct cgraph_node *
cgraph_clone_node (struct cgraph_node *n, gcov_type count, int loop_nest,
                   bool update_original)
{
  struct cgraph_node *new = cgraph_create_node ();
  struct cgraph_edge *e;
  gcov_type count_scale;

  /* Copy the identity and analysis state of the original node.  */
  new->decl = n->decl;
  new->origin = n->origin;
  /* A clone of a nested function joins its parent's nested list.  */
  if (new->origin)
    {
      new->next_nested = new->origin->nested;
      new->origin->nested = new;
    }
  new->analyzed = n->analyzed;
  new->local = n->local;
  new->global = n->global;
  new->rtl = n->rtl;
  new->master_clone = n->master_clone;
  new->count = count;
  /* Scale factor for the callee edge counts, as a fraction of
     REG_BR_PROB_BASE; zero when the original has no profile.  */
  if (n->count)
    count_scale = new->count * REG_BR_PROB_BASE / n->count;
  else
    count_scale = 0;
  if (update_original)
    {
      /* Subtract the cloned count, clamping at zero.  */
      n->count -= count;
      if (n->count < 0)
        n->count = 0;
    }

  /* Clone every outgoing call edge with scaled counts.  */
  for (e = n->callees;e; e=e->next_callee)
    cgraph_clone_edge (e, new, e->call_stmt, count_scale, loop_nest,
                       update_original);

  /* Splice the clone into N's doubly-linked clone list, right
     after N itself.  */
  new->next_clone = n->next_clone;
  new->prev_clone = n;
  n->next_clone = new;
  if (new->next_clone)
    new->next_clone->prev_clone = new;

  return new;
}
1106
 
1107
/* Return true if N is an master_clone, (see cgraph_master_clone).  */
1108
 
1109
bool
1110
cgraph_is_master_clone (struct cgraph_node *n)
1111
{
1112
  return (n == cgraph_master_clone (n));
1113
}
1114
 
1115
struct cgraph_node *
1116
cgraph_master_clone (struct cgraph_node *n)
1117
{
1118
  enum availability avail = cgraph_function_body_availability (n);
1119
 
1120
  if (avail == AVAIL_NOT_AVAILABLE || avail == AVAIL_OVERWRITABLE)
1121
    return NULL;
1122
 
1123
  if (!n->master_clone)
1124
    n->master_clone = cgraph_node (n->decl);
1125
 
1126
  return n->master_clone;
1127
}
1128
 
1129
/* NODE is no longer nested function; update cgraph accordingly.  */
1130
void
1131
cgraph_unnest_node (struct cgraph_node *node)
1132
{
1133
  struct cgraph_node **node2 = &node->origin->nested;
1134
  gcc_assert (node->origin);
1135
 
1136
  while (*node2 != node)
1137
    node2 = &(*node2)->next_nested;
1138
  *node2 = node->next_nested;
1139
  node->origin = NULL;
1140
}
1141
 
1142
/* Return function availability for NODE.  See cgraph.h for description
   of individual return values.  The else-if chain below is evaluated
   in order; the large comment in the middle belongs to the
   binds_local_p arm that follows it.  */
enum availability
cgraph_function_body_availability (struct cgraph_node *node)
{
  enum availability avail;
  gcc_assert (cgraph_function_flags_ready);
  if (!node->analyzed)
    avail = AVAIL_NOT_AVAILABLE;
  else if (node->local.local)
    avail = AVAIL_LOCAL;
  else if (node->local.externally_visible)
    avail = AVAIL_AVAILABLE;

  /* If the function can be overwritten, return OVERWRITABLE.  Take
     care at least of two notable extensions - the COMDAT functions
     used to share template instantiations in C++ (this is symmetric
     to code cp_cannot_inline_tree_fn and probably shall be shared and
     the inlinability hooks completely eliminated).

     ??? Does the C++ one definition rule allow us to always return
     AVAIL_AVAILABLE here?  That would be good reason to preserve this
     hook Similarly deal with extern inline functions - this is again
     necessary to get C++ shared functions having keyed templates
     right and in the C extension documentation we probably should
     document the requirement of both versions of function (extern
     inline and offline) having same side effect characteristics as
     good optimization is what this optimization is about.  */

  else if (!(*targetm.binds_local_p) (node->decl)
           && !DECL_COMDAT (node->decl) && !DECL_EXTERNAL (node->decl))
    avail = AVAIL_OVERWRITABLE;
  else avail = AVAIL_AVAILABLE;

  return avail;
}
1178
 
1179
/* Return variable availability.  See cgraph.h for description of individual
1180
   return values.  */
1181
enum availability
1182
cgraph_variable_initializer_availability (struct cgraph_varpool_node *node)
1183
{
1184
  gcc_assert (cgraph_function_flags_ready);
1185
  if (!node->finalized)
1186
    return AVAIL_NOT_AVAILABLE;
1187
  if (!TREE_PUBLIC (node->decl))
1188
    return AVAIL_AVAILABLE;
1189
  /* If the variable can be overwritten, return OVERWRITABLE.  Takes
1190
     care of at least two notable extensions - the COMDAT variables
1191
     used to share template instantiations in C++.  */
1192
  if (!(*targetm.binds_local_p) (node->decl) && !DECL_COMDAT (node->decl))
1193
    return AVAIL_OVERWRITABLE;
1194
  return AVAIL_AVAILABLE;
1195
}
1196
 
1197
 
1198
/* Add the function FNDECL to the call graph.  FNDECL is assumed to be
1199
   in low GIMPLE form and ready to be processed by cgraph_finalize_function.
1200
 
1201
   When operating in unit-at-a-time, a new callgraph node is added to
1202
   CGRAPH_EXPAND_QUEUE, which is processed after all the original
1203
   functions in the call graph .
1204
 
1205
   When not in unit-at-a-time, the new callgraph node is added to
1206
   CGRAPH_NODES_QUEUE for cgraph_assemble_pending_functions to
1207
   process.  */
1208
 
1209
void
1210
cgraph_add_new_function (tree fndecl)
1211
{
1212
  struct cgraph_node *n = cgraph_node (fndecl);
1213
  n->next_needed = cgraph_expand_queue;
1214
  cgraph_expand_queue = n;
1215
}
1216
 
1217
#include "gt-cgraph.h"

powered by: WebSVN 2.1.0

© copyright 1999-2024 OpenCores.org, equivalent to Oliscience, all rights reserved. OpenCores®, registered trademark.