| 1 |
684 |
jeremybenn |
/* Write and read the cgraph to the memory mapped representation of a
|
| 2 |
|
|
.o file.
|
| 3 |
|
|
|
| 4 |
|
|
Copyright 2009, 2010, 2011 Free Software Foundation, Inc.
|
| 5 |
|
|
Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
|
| 6 |
|
|
|
| 7 |
|
|
This file is part of GCC.
|
| 8 |
|
|
|
| 9 |
|
|
GCC is free software; you can redistribute it and/or modify it under
|
| 10 |
|
|
the terms of the GNU General Public License as published by the Free
|
| 11 |
|
|
Software Foundation; either version 3, or (at your option) any later
|
| 12 |
|
|
version.
|
| 13 |
|
|
|
| 14 |
|
|
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
|
| 15 |
|
|
WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
| 16 |
|
|
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
|
| 17 |
|
|
for more details.
|
| 18 |
|
|
|
| 19 |
|
|
You should have received a copy of the GNU General Public License
|
| 20 |
|
|
along with GCC; see the file COPYING3. If not see
|
| 21 |
|
|
<http://www.gnu.org/licenses/>. */
|
| 22 |
|
|
|
| 23 |
|
|
#include "config.h"
|
| 24 |
|
|
#include "system.h"
|
| 25 |
|
|
#include "coretypes.h"
|
| 26 |
|
|
#include "tm.h"
|
| 27 |
|
|
#include "tree.h"
|
| 28 |
|
|
#include "expr.h"
|
| 29 |
|
|
#include "flags.h"
|
| 30 |
|
|
#include "params.h"
|
| 31 |
|
|
#include "input.h"
|
| 32 |
|
|
#include "hashtab.h"
|
| 33 |
|
|
#include "langhooks.h"
|
| 34 |
|
|
#include "basic-block.h"
|
| 35 |
|
|
#include "tree-flow.h"
|
| 36 |
|
|
#include "cgraph.h"
|
| 37 |
|
|
#include "function.h"
|
| 38 |
|
|
#include "ggc.h"
|
| 39 |
|
|
#include "diagnostic-core.h"
|
| 40 |
|
|
#include "except.h"
|
| 41 |
|
|
#include "vec.h"
|
| 42 |
|
|
#include "timevar.h"
|
| 43 |
|
|
#include "output.h"
|
| 44 |
|
|
#include "pointer-set.h"
|
| 45 |
|
|
#include "lto-streamer.h"
|
| 46 |
|
|
#include "data-streamer.h"
|
| 47 |
|
|
#include "tree-streamer.h"
|
| 48 |
|
|
#include "gcov-io.h"
|
| 49 |
|
|
|
| 50 |
|
|
static void output_varpool (cgraph_node_set, varpool_node_set);
|
| 51 |
|
|
static void output_cgraph_opt_summary (cgraph_node_set set);
|
| 52 |
|
|
static void input_cgraph_opt_summary (VEC (cgraph_node_ptr, heap) * nodes);
|
| 53 |
|
|
|
| 54 |
|
|
/* Number of LDPR values known to GCC. */
|
| 55 |
|
|
#define LDPR_NUM_KNOWN (LDPR_PREVAILING_DEF_IRONLY_EXP + 1)
|
| 56 |
|
|
|
| 57 |
|
|
/* All node orders are offset by ORDER_BASE.  */
|
| 58 |
|
|
static int order_base;
|
| 59 |
|
|
|
| 60 |
|
|
/* Cgraph streaming is organized as a set of records whose type
   is indicated by a tag.  */
enum LTO_cgraph_tags
{
  /* Must leave 0 for the stopper.  */

  /* Cgraph node without body available.  */
  LTO_cgraph_unavail_node = 1,
  /* Cgraph node with function body.  */
  LTO_cgraph_analyzed_node,
  /* Cgraph edges.  */
  LTO_cgraph_edge,
  LTO_cgraph_indirect_edge,
  /* Sentinel; must stay last so streamed tags range-check.  */
  LTO_cgraph_last_tag
};
|
| 75 |
|
|
|
| 76 |
|
|
/* Create a new cgraph encoder. */
|
| 77 |
|
|
|
| 78 |
|
|
lto_cgraph_encoder_t
|
| 79 |
|
|
lto_cgraph_encoder_new (void)
|
| 80 |
|
|
{
|
| 81 |
|
|
lto_cgraph_encoder_t encoder = XCNEW (struct lto_cgraph_encoder_d);
|
| 82 |
|
|
encoder->map = pointer_map_create ();
|
| 83 |
|
|
encoder->nodes = NULL;
|
| 84 |
|
|
encoder->body = pointer_set_create ();
|
| 85 |
|
|
return encoder;
|
| 86 |
|
|
}
|
| 87 |
|
|
|
| 88 |
|
|
|
| 89 |
|
|
/* Delete ENCODER and its components. */
|
| 90 |
|
|
|
| 91 |
|
|
void
|
| 92 |
|
|
lto_cgraph_encoder_delete (lto_cgraph_encoder_t encoder)
|
| 93 |
|
|
{
|
| 94 |
|
|
VEC_free (cgraph_node_ptr, heap, encoder->nodes);
|
| 95 |
|
|
pointer_map_destroy (encoder->map);
|
| 96 |
|
|
pointer_set_destroy (encoder->body);
|
| 97 |
|
|
free (encoder);
|
| 98 |
|
|
}
|
| 99 |
|
|
|
| 100 |
|
|
|
| 101 |
|
|
/* Return the existing reference number of NODE in the cgraph encoder in
|
| 102 |
|
|
output block OB. Assign a new reference if this is the first time
|
| 103 |
|
|
NODE is encoded. */
|
| 104 |
|
|
|
| 105 |
|
|
int
|
| 106 |
|
|
lto_cgraph_encoder_encode (lto_cgraph_encoder_t encoder,
|
| 107 |
|
|
struct cgraph_node *node)
|
| 108 |
|
|
{
|
| 109 |
|
|
int ref;
|
| 110 |
|
|
void **slot;
|
| 111 |
|
|
|
| 112 |
|
|
slot = pointer_map_contains (encoder->map, node);
|
| 113 |
|
|
if (!slot)
|
| 114 |
|
|
{
|
| 115 |
|
|
ref = VEC_length (cgraph_node_ptr, encoder->nodes);
|
| 116 |
|
|
slot = pointer_map_insert (encoder->map, node);
|
| 117 |
|
|
*slot = (void *) (intptr_t) ref;
|
| 118 |
|
|
VEC_safe_push (cgraph_node_ptr, heap, encoder->nodes, node);
|
| 119 |
|
|
}
|
| 120 |
|
|
else
|
| 121 |
|
|
ref = (int) (intptr_t) *slot;
|
| 122 |
|
|
|
| 123 |
|
|
return ref;
|
| 124 |
|
|
}
|
| 125 |
|
|
|
| 126 |
|
|
#define LCC_NOT_FOUND (-1)
|
| 127 |
|
|
|
| 128 |
|
|
/* Look up NODE in encoder. Return NODE's reference if it has been encoded
|
| 129 |
|
|
or LCC_NOT_FOUND if it is not there. */
|
| 130 |
|
|
|
| 131 |
|
|
int
|
| 132 |
|
|
lto_cgraph_encoder_lookup (lto_cgraph_encoder_t encoder,
|
| 133 |
|
|
struct cgraph_node *node)
|
| 134 |
|
|
{
|
| 135 |
|
|
void **slot = pointer_map_contains (encoder->map, node);
|
| 136 |
|
|
return (slot ? (int) (intptr_t) *slot : LCC_NOT_FOUND);
|
| 137 |
|
|
}
|
| 138 |
|
|
|
| 139 |
|
|
|
| 140 |
|
|
/* Return the cgraph node corresponding to REF using ENCODER. */
|
| 141 |
|
|
|
| 142 |
|
|
struct cgraph_node *
|
| 143 |
|
|
lto_cgraph_encoder_deref (lto_cgraph_encoder_t encoder, int ref)
|
| 144 |
|
|
{
|
| 145 |
|
|
if (ref == LCC_NOT_FOUND)
|
| 146 |
|
|
return NULL;
|
| 147 |
|
|
|
| 148 |
|
|
return VEC_index (cgraph_node_ptr, encoder->nodes, ref);
|
| 149 |
|
|
}
|
| 150 |
|
|
|
| 151 |
|
|
|
| 152 |
|
|
/* Return TRUE if we should encode the body of NODE (if any).
   (The original comment said "initializer" — that wording belongs to the
   varpool variant; this predicate queries the function-body set.)  */

bool
lto_cgraph_encoder_encode_body_p (lto_cgraph_encoder_t encoder,
				  struct cgraph_node *node)
{
  return pointer_set_contains (encoder->body, node);
}
|
| 160 |
|
|
|
| 161 |
|
|
/* Mark NODE so that its body will be encoded (the predicate
   lto_cgraph_encoder_encode_body_p will then return true for it).  */

static void
lto_set_cgraph_encoder_encode_body (lto_cgraph_encoder_t encoder,
				    struct cgraph_node *node)
{
  pointer_set_insert (encoder->body, node);
}
|
| 169 |
|
|
|
| 170 |
|
|
/* Create a new varpool encoder. */
|
| 171 |
|
|
|
| 172 |
|
|
lto_varpool_encoder_t
|
| 173 |
|
|
lto_varpool_encoder_new (void)
|
| 174 |
|
|
{
|
| 175 |
|
|
lto_varpool_encoder_t encoder = XCNEW (struct lto_varpool_encoder_d);
|
| 176 |
|
|
encoder->map = pointer_map_create ();
|
| 177 |
|
|
encoder->initializer = pointer_set_create ();
|
| 178 |
|
|
encoder->nodes = NULL;
|
| 179 |
|
|
return encoder;
|
| 180 |
|
|
}
|
| 181 |
|
|
|
| 182 |
|
|
|
| 183 |
|
|
/* Delete ENCODER and its components. */
|
| 184 |
|
|
|
| 185 |
|
|
void
|
| 186 |
|
|
lto_varpool_encoder_delete (lto_varpool_encoder_t encoder)
|
| 187 |
|
|
{
|
| 188 |
|
|
VEC_free (varpool_node_ptr, heap, encoder->nodes);
|
| 189 |
|
|
pointer_map_destroy (encoder->map);
|
| 190 |
|
|
pointer_set_destroy (encoder->initializer);
|
| 191 |
|
|
free (encoder);
|
| 192 |
|
|
}
|
| 193 |
|
|
|
| 194 |
|
|
|
| 195 |
|
|
/* Return the existing reference number of NODE in the varpool encoder in
|
| 196 |
|
|
output block OB. Assign a new reference if this is the first time
|
| 197 |
|
|
NODE is encoded. */
|
| 198 |
|
|
|
| 199 |
|
|
int
|
| 200 |
|
|
lto_varpool_encoder_encode (lto_varpool_encoder_t encoder,
|
| 201 |
|
|
struct varpool_node *node)
|
| 202 |
|
|
{
|
| 203 |
|
|
int ref;
|
| 204 |
|
|
void **slot;
|
| 205 |
|
|
|
| 206 |
|
|
slot = pointer_map_contains (encoder->map, node);
|
| 207 |
|
|
if (!slot)
|
| 208 |
|
|
{
|
| 209 |
|
|
ref = VEC_length (varpool_node_ptr, encoder->nodes);
|
| 210 |
|
|
slot = pointer_map_insert (encoder->map, node);
|
| 211 |
|
|
*slot = (void *) (intptr_t) ref;
|
| 212 |
|
|
VEC_safe_push (varpool_node_ptr, heap, encoder->nodes, node);
|
| 213 |
|
|
}
|
| 214 |
|
|
else
|
| 215 |
|
|
ref = (int) (intptr_t) *slot;
|
| 216 |
|
|
|
| 217 |
|
|
return ref;
|
| 218 |
|
|
}
|
| 219 |
|
|
|
| 220 |
|
|
/* Look up NODE in encoder. Return NODE's reference if it has been encoded
|
| 221 |
|
|
or LCC_NOT_FOUND if it is not there. */
|
| 222 |
|
|
|
| 223 |
|
|
int
|
| 224 |
|
|
lto_varpool_encoder_lookup (lto_varpool_encoder_t encoder,
|
| 225 |
|
|
struct varpool_node *node)
|
| 226 |
|
|
{
|
| 227 |
|
|
void **slot = pointer_map_contains (encoder->map, node);
|
| 228 |
|
|
return (slot ? (int) (intptr_t) *slot : LCC_NOT_FOUND);
|
| 229 |
|
|
}
|
| 230 |
|
|
|
| 231 |
|
|
|
| 232 |
|
|
/* Return the varpool node corresponding to REF using ENCODER. */
|
| 233 |
|
|
|
| 234 |
|
|
struct varpool_node *
|
| 235 |
|
|
lto_varpool_encoder_deref (lto_varpool_encoder_t encoder, int ref)
|
| 236 |
|
|
{
|
| 237 |
|
|
if (ref == LCC_NOT_FOUND)
|
| 238 |
|
|
return NULL;
|
| 239 |
|
|
|
| 240 |
|
|
return VEC_index (varpool_node_ptr, encoder->nodes, ref);
|
| 241 |
|
|
}
|
| 242 |
|
|
|
| 243 |
|
|
|
| 244 |
|
|
/* Return TRUE if we should encode the initializer of NODE (if any).  */

bool
lto_varpool_encoder_encode_initializer_p (lto_varpool_encoder_t encoder,
					  struct varpool_node *node)
{
  return pointer_set_contains (encoder->initializer, node);
}
|
| 252 |
|
|
|
| 253 |
|
|
/* Mark NODE so that its initializer will be encoded (the predicate
   lto_varpool_encoder_encode_initializer_p then returns true for it).  */

static void
lto_set_varpool_encoder_encode_initializer (lto_varpool_encoder_t encoder,
					    struct varpool_node *node)
{
  pointer_set_insert (encoder->initializer, node);
}
|
| 261 |
|
|
|
| 262 |
|
|
/* Output the cgraph EDGE to OB using ENCODER.  The stream layout is:
   tag, caller ref, [callee ref unless indirect], count, then a bitpack
   with inline-failed reason, stmt uid, frequency and flags.  The reader
   must consume fields in exactly this order.  */

static void
lto_output_edge (struct lto_simple_output_block *ob, struct cgraph_edge *edge,
		 lto_cgraph_encoder_t encoder)
{
  unsigned int uid;
  intptr_t ref;
  struct bitpack_d bp;

  /* Tag first: tells the reader whether a callee reference follows.  */
  if (edge->indirect_unknown_callee)
    streamer_write_enum (ob->main_stream, LTO_cgraph_tags, LTO_cgraph_last_tag,
			 LTO_cgraph_indirect_edge);
  else
    streamer_write_enum (ob->main_stream, LTO_cgraph_tags, LTO_cgraph_last_tag,
			 LTO_cgraph_edge);

  ref = lto_cgraph_encoder_lookup (encoder, edge->caller);
  gcc_assert (ref != LCC_NOT_FOUND);
  streamer_write_hwi_stream (ob->main_stream, ref);

  /* Indirect edges have no known callee, so no reference is written.  */
  if (!edge->indirect_unknown_callee)
    {
      ref = lto_cgraph_encoder_lookup (encoder, edge->callee);
      gcc_assert (ref != LCC_NOT_FOUND);
      streamer_write_hwi_stream (ob->main_stream, ref);
    }

  streamer_write_hwi_stream (ob->main_stream, edge->count);

  bp = bitpack_create (ob->main_stream);
  /* When the caller body is gone, the gimple stmt uid is unavailable;
     fall back to the uid cached at stream-in time.  */
  uid = (!gimple_has_body_p (edge->caller->decl)
	 ? edge->lto_stmt_uid : gimple_uid (edge->call_stmt));
  bp_pack_enum (&bp, cgraph_inline_failed_enum,
		CIF_N_REASONS, edge->inline_failed);
  bp_pack_var_len_unsigned (&bp, uid);
  bp_pack_var_len_unsigned (&bp, edge->frequency);
  bp_pack_value (&bp, edge->indirect_inlining_edge, 1);
  bp_pack_value (&bp, edge->call_stmt_cannot_inline_p, 1);
  bp_pack_value (&bp, edge->can_throw_external, 1);
  if (edge->indirect_unknown_callee)
    {
      /* For indirect edges also stream the ECF flags, one bit each.  */
      int flags = edge->indirect_info->ecf_flags;
      bp_pack_value (&bp, (flags & ECF_CONST) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_PURE) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_NORETURN) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_MALLOC) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_NOTHROW) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_RETURNS_TWICE) != 0, 1);
      /* Flags that should not appear on indirect calls.  */
      gcc_assert (!(flags & (ECF_LOOPING_CONST_OR_PURE
			     | ECF_MAY_BE_ALLOCA
			     | ECF_SIBCALL
			     | ECF_LEAF
			     | ECF_NOVOPS)));
    }
  streamer_write_bitpack (&bp);
}
|
| 320 |
|
|
|
| 321 |
|
|
/* Return true if LIST contains references from other partitions, i.e.
   from a referring node that is either marked in_other_partition or is
   not a member of SET/VSET.  */

bool
referenced_from_other_partition_p (struct ipa_ref_list *list, cgraph_node_set set,
				   varpool_node_set vset)
{
  int i;
  struct ipa_ref *ref;
  for (i = 0; ipa_ref_list_refering_iterate (list, i, ref); i++)
    {
      if (ref->refering_type == IPA_REF_CGRAPH)
	{
	  if (ipa_ref_refering_node (ref)->in_other_partition
	      || !cgraph_node_in_set_p (ipa_ref_refering_node (ref), set))
	    return true;
	}
      else
	{
	  if (ipa_ref_refering_varpool_node (ref)->in_other_partition
	      || !varpool_node_in_set_p (ipa_ref_refering_varpool_node (ref),
					 vset))
	    return true;
	}
    }
  return false;
}
|
| 347 |
|
|
|
| 348 |
|
|
/* Return true when node is reachable from other partition. */
|
| 349 |
|
|
|
| 350 |
|
|
bool
|
| 351 |
|
|
reachable_from_other_partition_p (struct cgraph_node *node, cgraph_node_set set)
|
| 352 |
|
|
{
|
| 353 |
|
|
struct cgraph_edge *e;
|
| 354 |
|
|
if (!node->analyzed)
|
| 355 |
|
|
return false;
|
| 356 |
|
|
if (node->global.inlined_to)
|
| 357 |
|
|
return false;
|
| 358 |
|
|
for (e = node->callers; e; e = e->next_caller)
|
| 359 |
|
|
if (e->caller->in_other_partition
|
| 360 |
|
|
|| !cgraph_node_in_set_p (e->caller, set))
|
| 361 |
|
|
return true;
|
| 362 |
|
|
return false;
|
| 363 |
|
|
}
|
| 364 |
|
|
|
| 365 |
|
|
/* Return if LIST contain references from other partitions. */
|
| 366 |
|
|
|
| 367 |
|
|
bool
|
| 368 |
|
|
referenced_from_this_partition_p (struct ipa_ref_list *list, cgraph_node_set set,
|
| 369 |
|
|
varpool_node_set vset)
|
| 370 |
|
|
{
|
| 371 |
|
|
int i;
|
| 372 |
|
|
struct ipa_ref *ref;
|
| 373 |
|
|
for (i = 0; ipa_ref_list_refering_iterate (list, i, ref); i++)
|
| 374 |
|
|
{
|
| 375 |
|
|
if (ref->refering_type == IPA_REF_CGRAPH)
|
| 376 |
|
|
{
|
| 377 |
|
|
if (cgraph_node_in_set_p (ipa_ref_refering_node (ref), set))
|
| 378 |
|
|
return true;
|
| 379 |
|
|
}
|
| 380 |
|
|
else
|
| 381 |
|
|
{
|
| 382 |
|
|
if (varpool_node_in_set_p (ipa_ref_refering_varpool_node (ref),
|
| 383 |
|
|
vset))
|
| 384 |
|
|
return true;
|
| 385 |
|
|
}
|
| 386 |
|
|
}
|
| 387 |
|
|
return false;
|
| 388 |
|
|
}
|
| 389 |
|
|
|
| 390 |
|
|
/* Return true when node is reachable from other partition. */
|
| 391 |
|
|
|
| 392 |
|
|
bool
|
| 393 |
|
|
reachable_from_this_partition_p (struct cgraph_node *node, cgraph_node_set set)
|
| 394 |
|
|
{
|
| 395 |
|
|
struct cgraph_edge *e;
|
| 396 |
|
|
for (e = node->callers; e; e = e->next_caller)
|
| 397 |
|
|
if (cgraph_node_in_set_p (e->caller, set))
|
| 398 |
|
|
return true;
|
| 399 |
|
|
return false;
|
| 400 |
|
|
}
|
| 401 |
|
|
|
| 402 |
|
|
/* Output the cgraph NODE to OB. ENCODER is used to find the
|
| 403 |
|
|
reference number of NODE->inlined_to. SET is the set of nodes we
|
| 404 |
|
|
are writing to the current file. If NODE is not in SET, then NODE
|
| 405 |
|
|
is a boundary of a cgraph_node_set and we pretend NODE just has a
|
| 406 |
|
|
decl and no callees. WRITTEN_DECLS is the set of FUNCTION_DECLs
|
| 407 |
|
|
that have had their callgraph node written so far. This is used to
|
| 408 |
|
|
determine if NODE is a clone of a previously written node. */
|
| 409 |
|
|
|
| 410 |
|
|
static void
|
| 411 |
|
|
lto_output_node (struct lto_simple_output_block *ob, struct cgraph_node *node,
|
| 412 |
|
|
lto_cgraph_encoder_t encoder, cgraph_node_set set,
|
| 413 |
|
|
varpool_node_set vset)
|
| 414 |
|
|
{
|
| 415 |
|
|
unsigned int tag;
|
| 416 |
|
|
struct bitpack_d bp;
|
| 417 |
|
|
bool boundary_p;
|
| 418 |
|
|
intptr_t ref;
|
| 419 |
|
|
bool in_other_partition = false;
|
| 420 |
|
|
struct cgraph_node *clone_of;
|
| 421 |
|
|
|
| 422 |
|
|
boundary_p = !cgraph_node_in_set_p (node, set);
|
| 423 |
|
|
|
| 424 |
|
|
if (node->analyzed && !boundary_p)
|
| 425 |
|
|
tag = LTO_cgraph_analyzed_node;
|
| 426 |
|
|
else
|
| 427 |
|
|
tag = LTO_cgraph_unavail_node;
|
| 428 |
|
|
|
| 429 |
|
|
streamer_write_enum (ob->main_stream, LTO_cgraph_tags, LTO_cgraph_last_tag,
|
| 430 |
|
|
tag);
|
| 431 |
|
|
streamer_write_hwi_stream (ob->main_stream, node->order);
|
| 432 |
|
|
|
| 433 |
|
|
/* In WPA mode, we only output part of the call-graph. Also, we
|
| 434 |
|
|
fake cgraph node attributes. There are two cases that we care.
|
| 435 |
|
|
|
| 436 |
|
|
Boundary nodes: There are nodes that are not part of SET but are
|
| 437 |
|
|
called from within SET. We artificially make them look like
|
| 438 |
|
|
externally visible nodes with no function body.
|
| 439 |
|
|
|
| 440 |
|
|
Cherry-picked nodes: These are nodes we pulled from other
|
| 441 |
|
|
translation units into SET during IPA-inlining. We make them as
|
| 442 |
|
|
local static nodes to prevent clashes with other local statics. */
|
| 443 |
|
|
if (boundary_p && node->analyzed)
|
| 444 |
|
|
{
|
| 445 |
|
|
/* Inline clones can not be part of boundary.
|
| 446 |
|
|
gcc_assert (!node->global.inlined_to);
|
| 447 |
|
|
|
| 448 |
|
|
FIXME: At the moment they can be, when partition contains an inline
|
| 449 |
|
|
clone that is clone of inline clone from outside partition. We can
|
| 450 |
|
|
reshape the clone tree and make other tree to be the root, but it
|
| 451 |
|
|
needs a bit extra work and will be promplty done by cgraph_remove_node
|
| 452 |
|
|
after reading back. */
|
| 453 |
|
|
in_other_partition = 1;
|
| 454 |
|
|
}
|
| 455 |
|
|
|
| 456 |
|
|
clone_of = node->clone_of;
|
| 457 |
|
|
while (clone_of
|
| 458 |
|
|
&& (ref = lto_cgraph_encoder_lookup (encoder, clone_of)) == LCC_NOT_FOUND)
|
| 459 |
|
|
if (clone_of->prev_sibling_clone)
|
| 460 |
|
|
clone_of = clone_of->prev_sibling_clone;
|
| 461 |
|
|
else
|
| 462 |
|
|
clone_of = clone_of->clone_of;
|
| 463 |
|
|
|
| 464 |
|
|
if (LTO_cgraph_analyzed_node)
|
| 465 |
|
|
gcc_assert (clone_of || !node->clone_of);
|
| 466 |
|
|
if (!clone_of)
|
| 467 |
|
|
streamer_write_hwi_stream (ob->main_stream, LCC_NOT_FOUND);
|
| 468 |
|
|
else
|
| 469 |
|
|
streamer_write_hwi_stream (ob->main_stream, ref);
|
| 470 |
|
|
|
| 471 |
|
|
|
| 472 |
|
|
lto_output_fn_decl_index (ob->decl_state, ob->main_stream, node->decl);
|
| 473 |
|
|
streamer_write_hwi_stream (ob->main_stream, node->count);
|
| 474 |
|
|
streamer_write_hwi_stream (ob->main_stream, node->count_materialization_scale);
|
| 475 |
|
|
|
| 476 |
|
|
if (tag == LTO_cgraph_analyzed_node)
|
| 477 |
|
|
{
|
| 478 |
|
|
if (node->global.inlined_to)
|
| 479 |
|
|
{
|
| 480 |
|
|
ref = lto_cgraph_encoder_lookup (encoder, node->global.inlined_to);
|
| 481 |
|
|
gcc_assert (ref != LCC_NOT_FOUND);
|
| 482 |
|
|
}
|
| 483 |
|
|
else
|
| 484 |
|
|
ref = LCC_NOT_FOUND;
|
| 485 |
|
|
|
| 486 |
|
|
streamer_write_hwi_stream (ob->main_stream, ref);
|
| 487 |
|
|
}
|
| 488 |
|
|
|
| 489 |
|
|
if (node->same_comdat_group && !boundary_p)
|
| 490 |
|
|
{
|
| 491 |
|
|
ref = lto_cgraph_encoder_lookup (encoder, node->same_comdat_group);
|
| 492 |
|
|
gcc_assert (ref != LCC_NOT_FOUND);
|
| 493 |
|
|
}
|
| 494 |
|
|
else
|
| 495 |
|
|
ref = LCC_NOT_FOUND;
|
| 496 |
|
|
streamer_write_hwi_stream (ob->main_stream, ref);
|
| 497 |
|
|
|
| 498 |
|
|
bp = bitpack_create (ob->main_stream);
|
| 499 |
|
|
bp_pack_value (&bp, node->local.local, 1);
|
| 500 |
|
|
bp_pack_value (&bp, node->local.externally_visible, 1);
|
| 501 |
|
|
bp_pack_value (&bp, node->local.finalized, 1);
|
| 502 |
|
|
bp_pack_value (&bp, node->local.versionable, 1);
|
| 503 |
|
|
bp_pack_value (&bp, node->local.can_change_signature, 1);
|
| 504 |
|
|
bp_pack_value (&bp, node->local.redefined_extern_inline, 1);
|
| 505 |
|
|
bp_pack_value (&bp, node->needed, 1);
|
| 506 |
|
|
bp_pack_value (&bp, node->address_taken, 1);
|
| 507 |
|
|
bp_pack_value (&bp, node->abstract_and_needed, 1);
|
| 508 |
|
|
bp_pack_value (&bp, tag == LTO_cgraph_analyzed_node
|
| 509 |
|
|
&& !DECL_EXTERNAL (node->decl)
|
| 510 |
|
|
&& !DECL_COMDAT (node->decl)
|
| 511 |
|
|
&& (reachable_from_other_partition_p (node, set)
|
| 512 |
|
|
|| referenced_from_other_partition_p (&node->ref_list, set, vset)), 1);
|
| 513 |
|
|
bp_pack_value (&bp, node->lowered, 1);
|
| 514 |
|
|
bp_pack_value (&bp, in_other_partition, 1);
|
| 515 |
|
|
/* Real aliases in a boundary become non-aliases. However we still stream
|
| 516 |
|
|
alias info on weakrefs.
|
| 517 |
|
|
TODO: We lose a bit of information here - when we know that variable is
|
| 518 |
|
|
defined in other unit, we may use the info on aliases to resolve
|
| 519 |
|
|
symbol1 != symbol2 type tests that we can do only for locally defined objects
|
| 520 |
|
|
otherwise. */
|
| 521 |
|
|
bp_pack_value (&bp, node->alias && (!boundary_p || DECL_EXTERNAL (node->decl)), 1);
|
| 522 |
|
|
bp_pack_value (&bp, node->frequency, 2);
|
| 523 |
|
|
bp_pack_value (&bp, node->only_called_at_startup, 1);
|
| 524 |
|
|
bp_pack_value (&bp, node->only_called_at_exit, 1);
|
| 525 |
|
|
bp_pack_value (&bp, node->tm_clone, 1);
|
| 526 |
|
|
bp_pack_value (&bp, node->thunk.thunk_p && !boundary_p, 1);
|
| 527 |
|
|
bp_pack_enum (&bp, ld_plugin_symbol_resolution,
|
| 528 |
|
|
LDPR_NUM_KNOWN, node->resolution);
|
| 529 |
|
|
streamer_write_bitpack (&bp);
|
| 530 |
|
|
|
| 531 |
|
|
if (node->thunk.thunk_p && !boundary_p)
|
| 532 |
|
|
{
|
| 533 |
|
|
streamer_write_uhwi_stream
|
| 534 |
|
|
(ob->main_stream,
|
| 535 |
|
|
1 + (node->thunk.this_adjusting != 0) * 2
|
| 536 |
|
|
+ (node->thunk.virtual_offset_p != 0) * 4);
|
| 537 |
|
|
streamer_write_uhwi_stream (ob->main_stream, node->thunk.fixed_offset);
|
| 538 |
|
|
streamer_write_uhwi_stream (ob->main_stream, node->thunk.virtual_value);
|
| 539 |
|
|
}
|
| 540 |
|
|
if ((node->alias || node->thunk.thunk_p)
|
| 541 |
|
|
&& (!boundary_p || (node->alias && DECL_EXTERNAL (node->decl))))
|
| 542 |
|
|
{
|
| 543 |
|
|
streamer_write_hwi_in_range (ob->main_stream, 0, 1,
|
| 544 |
|
|
node->thunk.alias != NULL);
|
| 545 |
|
|
if (node->thunk.alias != NULL)
|
| 546 |
|
|
lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
|
| 547 |
|
|
node->thunk.alias);
|
| 548 |
|
|
}
|
| 549 |
|
|
}
|
| 550 |
|
|
|
| 551 |
|
|
/* Output the varpool NODE to OB.
   If NODE is not in SET, then NODE is a boundary.  */

static void
lto_output_varpool_node (struct lto_simple_output_block *ob, struct varpool_node *node,
			 lto_varpool_encoder_t varpool_encoder,
			 cgraph_node_set set, varpool_node_set vset)
{
  bool boundary_p = !varpool_node_in_set_p (node, vset) && node->analyzed;
  struct bitpack_d bp;
  int ref;

  streamer_write_hwi_stream (ob->main_stream, node->order);
  lto_output_var_decl_index (ob->decl_state, ob->main_stream, node->decl);
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, node->externally_visible, 1);
  bp_pack_value (&bp, node->force_output, 1);
  bp_pack_value (&bp, node->finalized, 1);
  bp_pack_value (&bp, node->alias, 1);
  bp_pack_value (&bp, node->alias_of != NULL, 1);
  gcc_assert (node->finalized || !node->analyzed);
  gcc_assert (node->needed);
  /* Constant pool initializers can be de-unified into individual ltrans units.
     FIXME: Alternatively at -Os we may want to avoid generating for them the local
     labels and share them across LTRANS partitions.  */
  if (DECL_IN_CONSTANT_POOL (node->decl)
      && !DECL_COMDAT (node->decl))
    {
      bp_pack_value (&bp, 0, 1);  /* used_from_other_partition.  */
      bp_pack_value (&bp, 0, 1);  /* in_other_partition.  */
    }
  else
    {
      bp_pack_value (&bp, node->analyzed
		     && referenced_from_other_partition_p (&node->ref_list,
							   set, vset), 1);
      bp_pack_value (&bp, boundary_p, 1);  /* in_other_partition.  */
    }
  streamer_write_bitpack (&bp);
  if (node->alias_of)
    lto_output_var_decl_index (ob->decl_state, ob->main_stream, node->alias_of);
  if (node->same_comdat_group && !boundary_p)
    {
      ref = lto_varpool_encoder_lookup (varpool_encoder, node->same_comdat_group);
      gcc_assert (ref != LCC_NOT_FOUND);
    }
  else
    ref = LCC_NOT_FOUND;
  streamer_write_hwi_stream (ob->main_stream, ref);
  streamer_write_enum (ob->main_stream, ld_plugin_symbol_resolution,
		       LDPR_NUM_KNOWN, node->resolution);
}
|
| 603 |
|
|
|
| 604 |
|
|
/* Output the IPA reference REF to OB, resolving the referred node to
   its index through ENCODER (cgraph) or VARPOOL_ENCODER (varpool).
   (The original header comment was a stray copy of the varpool-node
   writer's.)  */

static void
lto_output_ref (struct lto_simple_output_block *ob, struct ipa_ref *ref,
		lto_cgraph_encoder_t encoder,
		lto_varpool_encoder_t varpool_encoder)
{
  struct bitpack_d bp;
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, ref->refered_type, 1);
  bp_pack_value (&bp, ref->use, 2);
  streamer_write_bitpack (&bp);
  if (ref->refered_type == IPA_REF_CGRAPH)
    {
      int nref = lto_cgraph_encoder_lookup (encoder, ipa_ref_node (ref));
      gcc_assert (nref != LCC_NOT_FOUND);
      streamer_write_hwi_stream (ob->main_stream, nref);
    }
  else
    {
      int nref = lto_varpool_encoder_lookup (varpool_encoder,
					     ipa_ref_varpool_node (ref));
      gcc_assert (nref != LCC_NOT_FOUND);
      streamer_write_hwi_stream (ob->main_stream, nref);
    }
}
|
| 631 |
|
|
|
| 632 |
|
|
/* Stream out profile_summary to OB. */
|
| 633 |
|
|
|
| 634 |
|
|
static void
|
| 635 |
|
|
output_profile_summary (struct lto_simple_output_block *ob)
|
| 636 |
|
|
{
|
| 637 |
|
|
if (profile_info)
|
| 638 |
|
|
{
|
| 639 |
|
|
/* We do not output num, sum_all and run_max, they are not used by
|
| 640 |
|
|
GCC profile feedback and they are difficult to merge from multiple
|
| 641 |
|
|
units. */
|
| 642 |
|
|
gcc_assert (profile_info->runs);
|
| 643 |
|
|
streamer_write_uhwi_stream (ob->main_stream, profile_info->runs);
|
| 644 |
|
|
streamer_write_uhwi_stream (ob->main_stream, profile_info->sum_max);
|
| 645 |
|
|
}
|
| 646 |
|
|
else
|
| 647 |
|
|
streamer_write_uhwi_stream (ob->main_stream, 0);
|
| 648 |
|
|
}
|
| 649 |
|
|
|
| 650 |
|
|
/* Add NODE into encoder as well as nodes it is cloned from.
   Do it in a way so clones appear first (recursion walks up the
   clone_of chain before the final encode call).  */

static void
add_node_to (lto_cgraph_encoder_t encoder, struct cgraph_node *node,
	     bool include_body)
{
  if (node->clone_of)
    add_node_to (encoder, node->clone_of, include_body);
  else if (include_body)
    /* Only the root of the clone chain carries the body marking.  */
    lto_set_cgraph_encoder_encode_body (encoder, node);
  lto_cgraph_encoder_encode (encoder, node);
}
|
| 663 |
|
|
|
| 664 |
|
|
/* Add all references in LIST to the encoders: cgraph targets go into
   ENCODER (without bodies), varpool targets into VARPOOL_ENCODER.  */

static void
add_references (lto_cgraph_encoder_t encoder,
		lto_varpool_encoder_t varpool_encoder,
		struct ipa_ref_list *list)
{
  int i;
  struct ipa_ref *ref;
  for (i = 0; ipa_ref_list_reference_iterate (list, i, ref); i++)
    if (ref->refered_type == IPA_REF_CGRAPH)
      add_node_to (encoder, ipa_ref_node (ref), false);
    else
      {
	struct varpool_node *vnode = ipa_ref_varpool_node (ref);
	lto_varpool_encoder_encode (varpool_encoder, vnode);
      }
}
|
| 682 |
|
|
|
| 683 |
|
|
/* Output all callees or indirect outgoing edges.  EDGE must be the first such
   edge; it may be NULL, in which case nothing is written.  */

static void
output_outgoing_cgraph_edges (struct cgraph_edge *edge,
			      struct lto_simple_output_block *ob,
			      lto_cgraph_encoder_t encoder)
{
  if (!edge)
    return;

  /* Output edges in backward direction, so the reconstructed callgraph match
     and it is easy to associate call sites in the IPA pass summaries.  */
  while (edge->next_callee)
    edge = edge->next_callee;
  for (; edge; edge = edge->prev_callee)
    lto_output_edge (ob, edge, encoder);
}
|
| 701 |
|
|
|
| 702 |
|
|
/* Output the IPA reference lists for the cgraph nodes in SET and the
   varpool nodes in VSET into an LTO_section_refs block.  Each record is
   (count, owner-index, count refs); a 0 count terminates each of the
   two sequences.  */

static void
output_refs (cgraph_node_set set, varpool_node_set vset,
	     lto_cgraph_encoder_t encoder,
	     lto_varpool_encoder_t varpool_encoder)
{
  cgraph_node_set_iterator csi;
  varpool_node_set_iterator vsi;
  struct lto_simple_output_block *ob;
  int count;
  struct ipa_ref *ref;
  int i;

  ob = lto_create_simple_output_block (LTO_section_refs);

  for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
    {
      struct cgraph_node *node = csi_node (csi);

      count = ipa_ref_list_nreferences (&node->ref_list);
      if (count)
	{
	  streamer_write_uhwi_stream (ob->main_stream, count);
	  streamer_write_uhwi_stream (ob->main_stream,
				      lto_cgraph_encoder_lookup (encoder, node));
	  for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref); i++)
	    lto_output_ref (ob, ref, encoder, varpool_encoder);
	}
    }

  /* Terminator for the cgraph sequence.  */
  streamer_write_uhwi_stream (ob->main_stream, 0);

  for (vsi = vsi_start (vset); !vsi_end_p (vsi); vsi_next (&vsi))
    {
      struct varpool_node *node = vsi_node (vsi);

      count = ipa_ref_list_nreferences (&node->ref_list);
      if (count)
	{
	  streamer_write_uhwi_stream (ob->main_stream, count);
	  streamer_write_uhwi_stream (ob->main_stream,
				      lto_varpool_encoder_lookup (varpool_encoder,
								  node));
	  for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref); i++)
	    lto_output_ref (ob, ref, encoder, varpool_encoder);
	}
    }

  /* Terminator for the varpool sequence.  */
  streamer_write_uhwi_stream (ob->main_stream, 0);

  lto_destroy_simple_output_block (ob);
}
|
| 755 |
|
|
|
| 756 |
|
|
/* Find out all cgraph and varpool nodes we want to encode in current unit
   and insert them to encoders.

   STATE receives freshly created cgraph and varpool encoders; SET and VSET
   are the node sets forming the LTRANS partition being emitted.  */

void
compute_ltrans_boundary (struct lto_out_decl_state *state,
			 cgraph_node_set set, varpool_node_set vset)
{
  struct cgraph_node *node;
  cgraph_node_set_iterator csi;
  varpool_node_set_iterator vsi;
  struct cgraph_edge *edge;
  int i;
  lto_cgraph_encoder_t encoder;
  lto_varpool_encoder_t varpool_encoder;

  encoder = state->cgraph_node_encoder = lto_cgraph_encoder_new ();
  varpool_encoder = state->varpool_node_encoder = lto_varpool_encoder_new ();

  /* Go over all the nodes in SET and assign references.  Nodes in SET
     proper are encoded with bodies (second argument true).  */
  for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
    {
      node = csi_node (csi);
      add_node_to (encoder, node, true);
      add_references (encoder, varpool_encoder, &node->ref_list);
    }
  for (vsi = vsi_start (vset); !vsi_end_p (vsi); vsi_next (&vsi))
    {
      struct varpool_node *vnode = vsi_node (vsi);
      gcc_assert (!vnode->alias || vnode->alias_of);
      lto_varpool_encoder_encode (varpool_encoder, vnode);
      lto_set_varpool_encoder_encode_initializer (varpool_encoder, vnode);
      add_references (encoder, varpool_encoder, &vnode->ref_list);
    }
  /* Pickle in also the initializer of all referenced readonly variables
     to help folding.  Constant pool variables are not shared, so we must
     pickle those too.
     Note: lto_varpool_encoder_size may grow during iteration because
     add_references can encode further nodes; the loop re-evaluates the
     bound each pass and thus processes the newly added ones too.  */
  for (i = 0; i < lto_varpool_encoder_size (varpool_encoder); i++)
    {
      struct varpool_node *vnode = lto_varpool_encoder_deref (varpool_encoder, i);
      if (DECL_INITIAL (vnode->decl)
	  && !lto_varpool_encoder_encode_initializer_p (varpool_encoder,
							vnode)
	  && const_value_known_p (vnode->decl))
	{
	  lto_set_varpool_encoder_encode_initializer (varpool_encoder, vnode);
	  add_references (encoder, varpool_encoder, &vnode->ref_list);
	}
    }

  /* Go over all the nodes again to include callees that are not in
     SET.  These are encoded as boundary nodes (second argument false).  */
  for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
    {
      node = csi_node (csi);
      for (edge = node->callees; edge; edge = edge->next_callee)
	{
	  struct cgraph_node *callee = edge->callee;
	  if (!cgraph_node_in_set_p (callee, set))
	    {
	      /* We should have moved all the inlines.  */
	      gcc_assert (!callee->global.inlined_to);
	      add_node_to (encoder, callee, false);
	    }
	}
    }
}
|
| 821 |
|
|
|
| 822 |
|
|
/* Output the part of the cgraph in SET (with VSET giving the matching
   varpool partition).  Emits the cgraph section, profile summary, edges,
   toplevel asms, varpool section and reference section.  */

void
output_cgraph (cgraph_node_set set, varpool_node_set vset)
{
  struct cgraph_node *node;
  struct lto_simple_output_block *ob;
  cgraph_node_set_iterator csi;
  int i, n_nodes;
  lto_cgraph_encoder_t encoder;
  lto_varpool_encoder_t varpool_encoder;
  /* Static: toplevel asms must be emitted into the first partition only
     (see the comment near the bottom of this function).  */
  static bool asm_nodes_output = false;

  if (flag_wpa)
    output_cgraph_opt_summary (set);

  ob = lto_create_simple_output_block (LTO_section_cgraph);

  output_profile_summary (ob);

  /* An encoder for cgraph nodes should have been created by
     ipa_write_summaries_1.  */
  gcc_assert (ob->decl_state->cgraph_node_encoder);
  gcc_assert (ob->decl_state->varpool_node_encoder);
  encoder = ob->decl_state->cgraph_node_encoder;
  varpool_encoder = ob->decl_state->varpool_node_encoder;

  /* Write out the nodes.  We must first output a node and then its clones,
     otherwise at a time reading back the node there would be nothing to clone
     from.  */
  n_nodes = lto_cgraph_encoder_size (encoder);
  for (i = 0; i < n_nodes; i++)
    {
      node = lto_cgraph_encoder_deref (encoder, i);
      lto_output_node (ob, node, encoder, set, vset);
    }

  /* Go over the nodes in SET again to write edges.  */
  for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
    {
      node = csi_node (csi);
      output_outgoing_cgraph_edges (node->callees, ob, encoder);
      output_outgoing_cgraph_edges (node->indirect_calls, ob, encoder);
    }

  /* Zero terminates the node/edge stream (matches the tag loop in
     input_cgraph_1).  */
  streamer_write_uhwi_stream (ob->main_stream, 0);

  lto_destroy_simple_output_block (ob);

  /* Emit toplevel asms.
     When doing WPA we must output every asm just once.  Since we do not partition asm
     nodes at all, output them to first output.  This is kind of hack, but should work
     well.  */
  if (!asm_nodes_output)
    {
      asm_nodes_output = true;
      lto_output_toplevel_asms ();
    }

  output_varpool (set, vset);
  output_refs (set, vset, encoder, varpool_encoder);
}
|
| 884 |
|
|
|
| 885 |
|
|
/* Overwrite the information in NODE based on FILE_DATA, TAG, FLAGS,
   STACK_SIZE, SELF_TIME and SELF_SIZE.  This is called either to initialize
   NODE or to replace the values in it, for instance because the first
   time we saw it, the function body was not available but now it
   is.  BP is a bitpack with all the bitflags for NODE read from the
   stream.

   NOTE: the bp_unpack_value sequence below must exactly mirror the order
   in which the writer packed the flags; do not reorder.  */

static void
input_overwrite_node (struct lto_file_decl_data *file_data,
		      struct cgraph_node *node,
		      enum LTO_cgraph_tags tag,
		      struct bitpack_d *bp)
{
  /* Stash the tag in aux; input_cgraph_1 checks/clears it later.  */
  node->aux = (void *) tag;
  node->local.lto_file_data = file_data;

  node->local.local = bp_unpack_value (bp, 1);
  node->local.externally_visible = bp_unpack_value (bp, 1);
  node->local.finalized = bp_unpack_value (bp, 1);
  node->local.versionable = bp_unpack_value (bp, 1);
  node->local.can_change_signature = bp_unpack_value (bp, 1);
  node->local.redefined_extern_inline = bp_unpack_value (bp, 1);
  node->needed = bp_unpack_value (bp, 1);
  node->address_taken = bp_unpack_value (bp, 1);
  node->abstract_and_needed = bp_unpack_value (bp, 1);
  node->reachable_from_other_partition = bp_unpack_value (bp, 1);
  node->lowered = bp_unpack_value (bp, 1);
  node->analyzed = tag == LTO_cgraph_analyzed_node;
  node->in_other_partition = bp_unpack_value (bp, 1);
  if (node->in_other_partition
      /* Avoid updating decl when we are seeing just inline clone.
	 When inlining function that has functions already inlined into it,
	 we produce clones of inline clones.

	 WPA partitioning might put each clone into different unit and
	 we might end up streaming inline clone from other partition
	 to support clone we are interested in. */
      && (!node->clone_of
	  || node->clone_of->decl != node->decl))
    {
      /* Body lives elsewhere: treat the decl as external here.  */
      DECL_EXTERNAL (node->decl) = 1;
      TREE_STATIC (node->decl) = 0;
    }
  node->alias = bp_unpack_value (bp, 1);
  node->frequency = (enum node_frequency)bp_unpack_value (bp, 2);
  node->only_called_at_startup = bp_unpack_value (bp, 1);
  node->only_called_at_exit = bp_unpack_value (bp, 1);
  node->tm_clone = bp_unpack_value (bp, 1);
  node->thunk.thunk_p = bp_unpack_value (bp, 1);
  node->resolution = bp_unpack_enum (bp, ld_plugin_symbol_resolution,
				     LDPR_NUM_KNOWN);
}
|
| 937 |
|
|
|
| 938 |
|
|
/* Output the varpool nodes recorded in the current decl state's varpool
   encoder.  SET and VSET are forwarded to lto_output_varpool_node for
   partition-boundary decisions.  */

static void
output_varpool (cgraph_node_set set, varpool_node_set vset)
{
  struct lto_simple_output_block *ob = lto_create_simple_output_block (LTO_section_varpool);
  lto_varpool_encoder_t varpool_encoder = ob->decl_state->varpool_node_encoder;
  int len = lto_varpool_encoder_size (varpool_encoder), i;

  /* Node count first, so the reader knows how many to expect.  */
  streamer_write_uhwi_stream (ob->main_stream, len);

  /* Write out the nodes.  We must first output a node and then its clones,
     otherwise at a time reading back the node there would be nothing to clone
     from.  */
  for (i = 0; i < len; i++)
    {
      lto_output_varpool_node (ob, lto_varpool_encoder_deref (varpool_encoder, i),
			       varpool_encoder,
			       set, vset);
    }

  lto_destroy_simple_output_block (ob);
}
|
| 961 |
|
|
|
| 962 |
|
|
/* Read a cgraph node from input_block IB belonging to FILE_DATA.  TAG is
   the node's tag just read.  NODES holds previously read nodes so clone
   references can be resolved.  Return the node read or overwritten.

   NOTE: the read sequence below must exactly mirror the writer's output
   order; do not reorder reads.  */

static struct cgraph_node *
input_node (struct lto_file_decl_data *file_data,
	    struct lto_input_block *ib,
	    enum LTO_cgraph_tags tag,
	    VEC(cgraph_node_ptr, heap) *nodes)
{
  tree fn_decl;
  struct cgraph_node *node;
  struct bitpack_d bp;
  unsigned decl_index;
  int ref = LCC_NOT_FOUND, ref2 = LCC_NOT_FOUND;
  int clone_ref;
  int order;

  /* Orders were streamed relative to the writer's base; rebase onto this
     unit's order_base.  */
  order = streamer_read_hwi (ib) + order_base;
  clone_ref = streamer_read_hwi (ib);

  decl_index = streamer_read_uhwi (ib);
  fn_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);

  /* A valid clone_ref means this node is a clone of an earlier node in
     NODES; otherwise get-or-create a node for the decl.  */
  if (clone_ref != LCC_NOT_FOUND)
    {
      node = cgraph_clone_node (VEC_index (cgraph_node_ptr, nodes, clone_ref), fn_decl,
				0, CGRAPH_FREQ_BASE, false, NULL, false);
    }
  else
    node = cgraph_get_create_node (fn_decl);

  node->order = order;
  if (order >= cgraph_order)
    cgraph_order = order + 1;

  node->count = streamer_read_hwi (ib);
  node->count_materialization_scale = streamer_read_hwi (ib);

  /* inlined_to reference is only streamed for analyzed nodes.  */
  if (tag == LTO_cgraph_analyzed_node)
    ref = streamer_read_hwi (ib);

  ref2 = streamer_read_hwi (ib);

  /* Make sure that we have not read this node before.  Nodes that
     have already been read will have their tag stored in the 'aux'
     field.  Since built-in functions can be referenced in multiple
     functions, they are expected to be read more than once.  */
  if (node->aux && !DECL_BUILT_IN (node->decl))
    internal_error ("bytecode stream: found multiple instances of cgraph "
		    "node %d", node->uid);

  bp = streamer_read_bitpack (ib);
  input_overwrite_node (file_data, node, tag, &bp);

  /* Store a reference for now, and fix up later to be a pointer.  */
  node->global.inlined_to = (cgraph_node_ptr) (intptr_t) ref;

  /* Store a reference for now, and fix up later to be a pointer.  */
  node->same_comdat_group = (cgraph_node_ptr) (intptr_t) ref2;

  if (node->thunk.thunk_p)
    {
      /* TYPE packs this_adjusting (bit 1) and virtual_offset_p (bit 2);
	 see the corresponding unpacking below.  */
      int type = streamer_read_uhwi (ib);
      HOST_WIDE_INT fixed_offset = streamer_read_uhwi (ib);
      HOST_WIDE_INT virtual_value = streamer_read_uhwi (ib);

      node->thunk.fixed_offset = fixed_offset;
      node->thunk.this_adjusting = (type & 2);
      node->thunk.virtual_value = virtual_value;
      node->thunk.virtual_offset_p = (type & 4);
    }
  if (node->thunk.thunk_p || node->alias)
    {
      /* An optional alias target decl follows, guarded by a 0/1 flag.  */
      if (streamer_read_hwi_in_range (ib, "alias nonzero flag", 0, 1))
	{
	  decl_index = streamer_read_uhwi (ib);
	  node->thunk.alias = lto_file_decl_data_get_fn_decl (file_data,
							      decl_index);
	}
    }
  return node;
}
|
| 1044 |
|
|
|
| 1045 |
|
|
/* Read a varpool node from input_block IB belonging to FILE_DATA.
   Return the node read or overwritten.

   NOTE: the read sequence must exactly mirror the writer's output order.  */

static struct varpool_node *
input_varpool_node (struct lto_file_decl_data *file_data,
		    struct lto_input_block *ib)
{
  int decl_index;
  tree var_decl;
  struct varpool_node *node;
  struct bitpack_d bp;
  int ref = LCC_NOT_FOUND;
  bool non_null_aliasof;
  int order;

  /* Rebase the streamed order onto this unit's order_base.  */
  order = streamer_read_hwi (ib) + order_base;
  decl_index = streamer_read_uhwi (ib);
  var_decl = lto_file_decl_data_get_var_decl (file_data, decl_index);
  node = varpool_node (var_decl);
  node->order = order;
  if (order >= cgraph_order)
    cgraph_order = order + 1;
  node->lto_file_data = file_data;

  bp = streamer_read_bitpack (ib);
  node->externally_visible = bp_unpack_value (&bp, 1);
  node->force_output = bp_unpack_value (&bp, 1);
  node->finalized = bp_unpack_value (&bp, 1);
  node->alias = bp_unpack_value (&bp, 1);
  non_null_aliasof = bp_unpack_value (&bp, 1);
  node->analyzed = node->finalized;
  node->used_from_other_partition = bp_unpack_value (&bp, 1);
  node->in_other_partition = bp_unpack_value (&bp, 1);
  if (node->in_other_partition)
    {
      /* Definition lives in another partition: external here.  */
      DECL_EXTERNAL (node->decl) = 1;
      TREE_STATIC (node->decl) = 0;
    }
  if (node->finalized)
    varpool_mark_needed_node (node);
  if (non_null_aliasof)
    {
      /* Alias target decl follows only when the flag was set.  */
      decl_index = streamer_read_uhwi (ib);
      node->alias_of = lto_file_decl_data_get_var_decl (file_data, decl_index);
    }
  ref = streamer_read_hwi (ib);
  /* Store a reference for now, and fix up later to be a pointer.  */
  node->same_comdat_group = (struct varpool_node *) (intptr_t) ref;
  node->resolution = streamer_read_enum (ib, ld_plugin_symbol_resolution,
					 LDPR_NUM_KNOWN);

  return node;
}
|
| 1098 |
|
|
|
| 1099 |
|
|
/* Read an IPA reference record from IB and register it on the referring
   node (exactly one of REFERING_NODE / REFERING_VARPOOL_NODE is non-NULL).
   NODES and VARPOOL_NODES map streamed indices back to nodes.  */

static void
input_ref (struct lto_input_block *ib,
	   struct cgraph_node *refering_node,
	   struct varpool_node *refering_varpool_node,
	   VEC(cgraph_node_ptr, heap) *nodes,
	   VEC(varpool_node_ptr, heap) *varpool_nodes)
{
  struct cgraph_node *node = NULL;
  struct varpool_node *varpool_node = NULL;
  struct bitpack_d bp;
  enum ipa_ref_type type;
  enum ipa_ref_use use;

  bp = streamer_read_bitpack (ib);
  type = (enum ipa_ref_type) bp_unpack_value (&bp, 1);
  use = (enum ipa_ref_use) bp_unpack_value (&bp, 2);
  /* The referred-to entity is an index into the matching node vector.  */
  if (type == IPA_REF_CGRAPH)
    node = VEC_index (cgraph_node_ptr, nodes, streamer_read_hwi (ib));
  else
    varpool_node = VEC_index (varpool_node_ptr, varpool_nodes,
			      streamer_read_hwi (ib));
  ipa_record_reference (refering_node, refering_varpool_node,
			node, varpool_node, use, NULL);
}
|
| 1126 |
|
|
|
| 1127 |
|
|
/* Read an edge from IB.  NODES points to a vector of previously read nodes for
   decoding caller and callee of the edge to be read.  If INDIRECT is true, the
   edge being read is indirect (in the sense that it has
   indirect_unknown_callee set).  */

static void
input_edge (struct lto_input_block *ib, VEC(cgraph_node_ptr, heap) *nodes,
	    bool indirect)
{
  struct cgraph_node *caller, *callee;
  struct cgraph_edge *edge;
  unsigned int stmt_id;
  gcov_type count;
  int freq;
  cgraph_inline_failed_t inline_failed;
  struct bitpack_d bp;
  int ecf_flags = 0;

  caller = VEC_index (cgraph_node_ptr, nodes, streamer_read_hwi (ib));
  if (caller == NULL || caller->decl == NULL_TREE)
    internal_error ("bytecode stream: no caller found while reading edge");

  if (!indirect)
    {
      callee = VEC_index (cgraph_node_ptr, nodes, streamer_read_hwi (ib));
      if (callee == NULL || callee->decl == NULL_TREE)
	internal_error ("bytecode stream: no callee found while reading edge");
    }
  else
    callee = NULL;

  count = (gcov_type) streamer_read_hwi (ib);

  /* The remaining fields live in one bitpack; unpack order must match
     the writer.  */
  bp = streamer_read_bitpack (ib);
  inline_failed = bp_unpack_enum (&bp, cgraph_inline_failed_enum, CIF_N_REASONS);
  stmt_id = bp_unpack_var_len_unsigned (&bp);
  freq = (int) bp_unpack_var_len_unsigned (&bp);

  if (indirect)
    edge = cgraph_create_indirect_edge (caller, NULL, 0, count, freq);
  else
    edge = cgraph_create_edge (caller, callee, NULL, count, freq);

  edge->indirect_inlining_edge = bp_unpack_value (&bp, 1);
  edge->lto_stmt_uid = stmt_id;
  edge->inline_failed = inline_failed;
  edge->call_stmt_cannot_inline_p = bp_unpack_value (&bp, 1);
  edge->can_throw_external = bp_unpack_value (&bp, 1);
  if (indirect)
    {
      /* Indirect edges also carry the ECF flags of the call, one bit
	 each, in this fixed order.  */
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_CONST;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_PURE;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_NORETURN;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_MALLOC;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_NOTHROW;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_RETURNS_TWICE;
      edge->indirect_info->ecf_flags = ecf_flags;
    }
}
|
| 1192 |
|
|
|
| 1193 |
|
|
|
| 1194 |
|
|
/* Read a cgraph from IB using the info in FILE_DATA.  Returns the vector
   of nodes read, in stream order, so later passes (refs, opt summaries)
   can resolve index references.  */

static VEC(cgraph_node_ptr, heap) *
input_cgraph_1 (struct lto_file_decl_data *file_data,
		struct lto_input_block *ib)
{
  enum LTO_cgraph_tags tag;
  VEC(cgraph_node_ptr, heap) *nodes = NULL;
  struct cgraph_node *node;
  unsigned i;

  tag = streamer_read_enum (ib, LTO_cgraph_tags, LTO_cgraph_last_tag);
  order_base = cgraph_order;
  /* The stream is a sequence of tagged records terminated by a zero tag.  */
  while (tag)
    {
      if (tag == LTO_cgraph_edge)
	input_edge (ib, nodes, false);
      else if (tag == LTO_cgraph_indirect_edge)
	input_edge (ib, nodes, true);
      else
	{
	  node = input_node (file_data, ib, tag, nodes);
	  if (node == NULL || node->decl == NULL_TREE)
	    internal_error ("bytecode stream: found empty cgraph node");
	  VEC_safe_push (cgraph_node_ptr, heap, nodes, node);
	  lto_cgraph_encoder_encode (file_data->cgraph_node_encoder, node);
	}

      tag = streamer_read_enum (ib, LTO_cgraph_tags, LTO_cgraph_last_tag);
    }

  lto_input_toplevel_asms (file_data, order_base);

  /* AUX pointers should be all non-zero for nodes read from the stream.  */
#ifdef ENABLE_CHECKING
  FOR_EACH_VEC_ELT (cgraph_node_ptr, nodes, i, node)
    gcc_assert (node->aux);
#endif
  FOR_EACH_VEC_ELT (cgraph_node_ptr, nodes, i, node)
    {
      int ref = (int) (intptr_t) node->global.inlined_to;

      /* We share declaration of builtins, so we may read same node twice.  */
      if (!node->aux)
	continue;
      node->aux = NULL;

      /* Fixup inlined_to from reference to pointer.  */
      if (ref != LCC_NOT_FOUND)
	node->global.inlined_to = VEC_index (cgraph_node_ptr, nodes, ref);
      else
	node->global.inlined_to = NULL;

      ref = (int) (intptr_t) node->same_comdat_group;

      /* Fixup same_comdat_group from reference to pointer.  */
      if (ref != LCC_NOT_FOUND)
	node->same_comdat_group = VEC_index (cgraph_node_ptr, nodes, ref);
      else
	node->same_comdat_group = NULL;
    }
  /* Re-mark aux so input_cgraph can later tell which nodes came from
     a stream; it is cleared again at the end of input_cgraph.  */
  FOR_EACH_VEC_ELT (cgraph_node_ptr, nodes, i, node)
    node->aux = (void *)1;
  return nodes;
}
|
| 1259 |
|
|
|
| 1260 |
|
|
/* Read a varpool from IB using the info in FILE_DATA.  Returns the vector
   of varpool nodes read, in stream order.  */

static VEC(varpool_node_ptr, heap) *
input_varpool_1 (struct lto_file_decl_data *file_data,
		struct lto_input_block *ib)
{
  unsigned HOST_WIDE_INT len;
  VEC(varpool_node_ptr, heap) *varpool = NULL;
  int i;
  struct varpool_node *node;

  /* The writer emitted the node count first (see output_varpool).  */
  len = streamer_read_uhwi (ib);
  while (len)
    {
      VEC_safe_push (varpool_node_ptr, heap, varpool,
		     input_varpool_node (file_data, ib));
      len--;
    }
#ifdef ENABLE_CHECKING
  FOR_EACH_VEC_ELT (varpool_node_ptr, varpool, i, node)
    gcc_assert (!node->aux);
#endif
  FOR_EACH_VEC_ELT (varpool_node_ptr, varpool, i, node)
    {
      int ref = (int) (intptr_t) node->same_comdat_group;
      /* We share declaration of builtins, so we may read same node twice.  */
      if (node->aux)
	continue;
      /* Mark as processed so a duplicate later in the vector is skipped.  */
      node->aux = (void *)1;

      /* Fixup same_comdat_group from reference to pointer.  */
      if (ref != LCC_NOT_FOUND)
	node->same_comdat_group = VEC_index (varpool_node_ptr, varpool, ref);
      else
	node->same_comdat_group = NULL;
    }
  FOR_EACH_VEC_ELT (varpool_node_ptr, varpool, i, node)
    node->aux = NULL;
  return varpool;
}
|
| 1300 |
|
|
|
| 1301 |
|
|
/* Input ipa_refs.  The section is two zero-terminated runs: first the
   reference lists of cgraph nodes, then those of varpool nodes.  Each
   record is (count, referring-node-index, COUNT reference entries).
   NODES and VARPOOL map streamed indices to the nodes read earlier.  */

static void
input_refs (struct lto_input_block *ib,
	    VEC(cgraph_node_ptr, heap) *nodes,
	    VEC(varpool_node_ptr, heap) *varpool)
{
  int count;
  int idx;
  while (true)
    {
      struct cgraph_node *node;
      count = streamer_read_uhwi (ib);
      /* A zero count terminates the cgraph-node run.  */
      if (!count)
	break;
      idx = streamer_read_uhwi (ib);
      node = VEC_index (cgraph_node_ptr, nodes, idx);
      while (count)
	{
	  input_ref (ib, node, NULL, nodes, varpool);
	  count--;
	}
    }
  while (true)
    {
      struct varpool_node *node;
      count = streamer_read_uhwi (ib);
      /* A zero count terminates the varpool-node run.  */
      if (!count)
	break;
      node = VEC_index (varpool_node_ptr, varpool,
			streamer_read_uhwi (ib));
      while (count)
	{
	  input_ref (ib, NULL, node, nodes, varpool);
	  count--;
	}
    }
}
|
| 1339 |
|
|
|
| 1340 |
|
|
|
| 1341 |
|
|
static struct gcov_ctr_summary lto_gcov_summary;
|
| 1342 |
|
|
|
| 1343 |
|
|
/* Input profile_info from IB. */
|
| 1344 |
|
|
static void
|
| 1345 |
|
|
input_profile_summary (struct lto_input_block *ib,
|
| 1346 |
|
|
struct lto_file_decl_data *file_data)
|
| 1347 |
|
|
{
|
| 1348 |
|
|
unsigned int runs = streamer_read_uhwi (ib);
|
| 1349 |
|
|
if (runs)
|
| 1350 |
|
|
{
|
| 1351 |
|
|
file_data->profile_info.runs = runs;
|
| 1352 |
|
|
file_data->profile_info.sum_max = streamer_read_uhwi (ib);
|
| 1353 |
|
|
}
|
| 1354 |
|
|
|
| 1355 |
|
|
}
|
| 1356 |
|
|
|
| 1357 |
|
|
/* Rescale profile summaries to the same number of runs in the whole unit. */
|
| 1358 |
|
|
|
| 1359 |
|
|
static void
|
| 1360 |
|
|
merge_profile_summaries (struct lto_file_decl_data **file_data_vec)
|
| 1361 |
|
|
{
|
| 1362 |
|
|
struct lto_file_decl_data *file_data;
|
| 1363 |
|
|
unsigned int j;
|
| 1364 |
|
|
gcov_unsigned_t max_runs = 0;
|
| 1365 |
|
|
struct cgraph_node *node;
|
| 1366 |
|
|
struct cgraph_edge *edge;
|
| 1367 |
|
|
|
| 1368 |
|
|
/* Find unit with maximal number of runs. If we ever get serious about
|
| 1369 |
|
|
roundoff errors, we might also consider computing smallest common
|
| 1370 |
|
|
multiply. */
|
| 1371 |
|
|
for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
|
| 1372 |
|
|
if (max_runs < file_data->profile_info.runs)
|
| 1373 |
|
|
max_runs = file_data->profile_info.runs;
|
| 1374 |
|
|
|
| 1375 |
|
|
if (!max_runs)
|
| 1376 |
|
|
return;
|
| 1377 |
|
|
|
| 1378 |
|
|
/* Simple overflow check. We probably don't need to support that many train
|
| 1379 |
|
|
runs. Such a large value probably imply data corruption anyway. */
|
| 1380 |
|
|
if (max_runs > INT_MAX / REG_BR_PROB_BASE)
|
| 1381 |
|
|
{
|
| 1382 |
|
|
sorry ("At most %i profile runs is supported. Perhaps corrupted profile?",
|
| 1383 |
|
|
INT_MAX / REG_BR_PROB_BASE);
|
| 1384 |
|
|
return;
|
| 1385 |
|
|
}
|
| 1386 |
|
|
|
| 1387 |
|
|
profile_info = <o_gcov_summary;
|
| 1388 |
|
|
lto_gcov_summary.runs = max_runs;
|
| 1389 |
|
|
lto_gcov_summary.sum_max = 0;
|
| 1390 |
|
|
|
| 1391 |
|
|
/* Rescale all units to the maximal number of runs.
|
| 1392 |
|
|
sum_max can not be easily merged, as we have no idea what files come from
|
| 1393 |
|
|
the same run. We do not use the info anyway, so leave it 0. */
|
| 1394 |
|
|
for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
|
| 1395 |
|
|
if (file_data->profile_info.runs)
|
| 1396 |
|
|
{
|
| 1397 |
|
|
int scale = ((REG_BR_PROB_BASE * max_runs
|
| 1398 |
|
|
+ file_data->profile_info.runs / 2)
|
| 1399 |
|
|
/ file_data->profile_info.runs);
|
| 1400 |
|
|
lto_gcov_summary.sum_max = MAX (lto_gcov_summary.sum_max,
|
| 1401 |
|
|
(file_data->profile_info.sum_max
|
| 1402 |
|
|
* scale
|
| 1403 |
|
|
+ REG_BR_PROB_BASE / 2)
|
| 1404 |
|
|
/ REG_BR_PROB_BASE);
|
| 1405 |
|
|
}
|
| 1406 |
|
|
|
| 1407 |
|
|
/* Watch roundoff errors. */
|
| 1408 |
|
|
if (lto_gcov_summary.sum_max < max_runs)
|
| 1409 |
|
|
lto_gcov_summary.sum_max = max_runs;
|
| 1410 |
|
|
|
| 1411 |
|
|
/* If merging already happent at WPA time, we are done. */
|
| 1412 |
|
|
if (flag_ltrans)
|
| 1413 |
|
|
return;
|
| 1414 |
|
|
|
| 1415 |
|
|
/* Now compute count_materialization_scale of each node.
|
| 1416 |
|
|
During LTRANS we already have values of count_materialization_scale
|
| 1417 |
|
|
computed, so just update them. */
|
| 1418 |
|
|
for (node = cgraph_nodes; node; node = node->next)
|
| 1419 |
|
|
if (node->local.lto_file_data
|
| 1420 |
|
|
&& node->local.lto_file_data->profile_info.runs)
|
| 1421 |
|
|
{
|
| 1422 |
|
|
int scale;
|
| 1423 |
|
|
|
| 1424 |
|
|
scale =
|
| 1425 |
|
|
((node->count_materialization_scale * max_runs
|
| 1426 |
|
|
+ node->local.lto_file_data->profile_info.runs / 2)
|
| 1427 |
|
|
/ node->local.lto_file_data->profile_info.runs);
|
| 1428 |
|
|
node->count_materialization_scale = scale;
|
| 1429 |
|
|
if (scale < 0)
|
| 1430 |
|
|
fatal_error ("Profile information in %s corrupted",
|
| 1431 |
|
|
file_data->file_name);
|
| 1432 |
|
|
|
| 1433 |
|
|
if (scale == REG_BR_PROB_BASE)
|
| 1434 |
|
|
continue;
|
| 1435 |
|
|
for (edge = node->callees; edge; edge = edge->next_callee)
|
| 1436 |
|
|
edge->count = ((edge->count * scale + REG_BR_PROB_BASE / 2)
|
| 1437 |
|
|
/ REG_BR_PROB_BASE);
|
| 1438 |
|
|
node->count = ((node->count * scale + REG_BR_PROB_BASE / 2)
|
| 1439 |
|
|
/ REG_BR_PROB_BASE);
|
| 1440 |
|
|
}
|
| 1441 |
|
|
}
|
| 1442 |
|
|
|
| 1443 |
|
|
/* Input and merge the cgraph from each of the .o files passed to
   lto1.  For every input unit: read its profile summary, cgraph section,
   varpool section and reference section, then merge all profile
   summaries and clear the aux markers left by input_cgraph_1.  */

void
input_cgraph (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;
  struct cgraph_node *node;

  while ((file_data = file_data_vec[j++]))
    {
      const char *data;
      size_t len;
      struct lto_input_block *ib;
      VEC(cgraph_node_ptr, heap) *nodes;
      VEC(varpool_node_ptr, heap) *varpool;

      /* Sections must be read in the same order output_cgraph wrote
	 them: cgraph, varpool, refs.  */
      ib = lto_create_simple_input_block (file_data, LTO_section_cgraph,
					  &data, &len);
      if (!ib)
	fatal_error ("cannot find LTO cgraph in %s", file_data->file_name);
      input_profile_summary (ib, file_data);
      file_data->cgraph_node_encoder = lto_cgraph_encoder_new ();
      nodes = input_cgraph_1 (file_data, ib);
      lto_destroy_simple_input_block (file_data, LTO_section_cgraph,
				      ib, data, len);

      ib = lto_create_simple_input_block (file_data, LTO_section_varpool,
					  &data, &len);
      if (!ib)
	fatal_error ("cannot find LTO varpool in %s", file_data->file_name);
      varpool = input_varpool_1 (file_data, ib);
      lto_destroy_simple_input_block (file_data, LTO_section_varpool,
				      ib, data, len);

      ib = lto_create_simple_input_block (file_data, LTO_section_refs,
					  &data, &len);
      if (!ib)
	fatal_error("cannot find LTO section refs in %s", file_data->file_name);
      input_refs (ib, nodes, varpool);
      lto_destroy_simple_input_block (file_data, LTO_section_refs,
				      ib, data, len);
      if (flag_ltrans)
	input_cgraph_opt_summary (nodes);
      VEC_free (cgraph_node_ptr, heap, nodes);
      VEC_free (varpool_node_ptr, heap, varpool);
    }

  merge_profile_summaries (file_data_vec);

  /* Clear out the aux field that was used to store enough state to
     tell which nodes should be overwritten.  */
  for (node = cgraph_nodes; node; node = node->next)
    {
      /* Some nodes may have been created by cgraph_node.  This
	 happens when the callgraph contains nested functions.  If the
	 node for the parent function was never emitted to the gimple
	 file, cgraph_node will create a node for it when setting the
	 context of the nested function.  */
      if (node->local.lto_file_data)
	node->aux = NULL;
    }
}
|
| 1508 |
|
|
|
| 1509 |
|
|
/* True when we need optimization summary for NODE. */
|
| 1510 |
|
|
|
| 1511 |
|
|
static int
|
| 1512 |
|
|
output_cgraph_opt_summary_p (struct cgraph_node *node,
|
| 1513 |
|
|
cgraph_node_set set ATTRIBUTE_UNUSED)
|
| 1514 |
|
|
{
|
| 1515 |
|
|
return (node->clone_of
|
| 1516 |
|
|
&& (node->clone.tree_map
|
| 1517 |
|
|
|| node->clone.args_to_skip
|
| 1518 |
|
|
|| node->clone.combined_args_to_skip));
|
| 1519 |
|
|
}
|
| 1520 |
|
|
|
| 1521 |
|
|
/* Output optimization summary for EDGE to OB.
   There is currently no per-edge optimization data to stream; this
   empty placeholder is kept so the writer structure mirrors the reader
   (input_edge_opt_summary) and per-edge data can be added later without
   reshaping the section format.  */
static void
output_edge_opt_summary (struct output_block *ob ATTRIBUTE_UNUSED,
			 struct cgraph_edge *edge ATTRIBUTE_UNUSED)
{
  /* Intentionally empty.  */
}
|
| 1527 |
|
|
|
| 1528 |
|
|
/* Output optimization summary for NODE to OB.

   The record layout must stay in exact sync with input_node_opt_summary:
     1. args_to_skip bitmap: popcount, then each set bit index;
     2. combined_args_to_skip bitmap: same encoding;
     3. tree_map: length, then per entry the parameter number, the
	replacement tree, and a 2-bit pack (replace_p, ref_p);
     4. per-edge summaries for callees and indirect calls, but only when
	NODE is in SET.  */

static void
output_node_opt_summary (struct output_block *ob,
			 struct cgraph_node *node,
			 cgraph_node_set set)
{
  unsigned int index;
  bitmap_iterator bi;
  struct ipa_replace_map *map;
  struct bitpack_d bp;
  int i;
  struct cgraph_edge *e;

  /* Stream args_to_skip as a count followed by the bit indices; a
     count of 0 doubles as the "no bitmap" marker.  */
  if (node->clone.args_to_skip)
    {
      streamer_write_uhwi (ob, bitmap_count_bits (node->clone.args_to_skip));
      EXECUTE_IF_SET_IN_BITMAP (node->clone.args_to_skip, 0, index, bi)
	streamer_write_uhwi (ob, index);
    }
  else
    streamer_write_uhwi (ob, 0);
  /* Same encoding for the combined bitmap.  */
  if (node->clone.combined_args_to_skip)
    {
      streamer_write_uhwi (ob, bitmap_count_bits (node->clone.combined_args_to_skip));
      EXECUTE_IF_SET_IN_BITMAP (node->clone.combined_args_to_skip, 0, index, bi)
	streamer_write_uhwi (ob, index);
    }
  else
    streamer_write_uhwi (ob, 0);
  /* Parameter-replacement map: length prefix, then one record per map
     entry.  */
  streamer_write_uhwi (ob, VEC_length (ipa_replace_map_p,
				       node->clone.tree_map));
  FOR_EACH_VEC_ELT (ipa_replace_map_p, node->clone.tree_map, i, map)
    {
      int parm_num;
      tree parm;

      /* Translate the old tree into a stable parameter index by walking
	 DECL_ARGUMENTS; the index survives streaming where the pointer
	 cannot.  */
      for (parm_num = 0, parm = DECL_ARGUMENTS (node->decl); parm;
	   parm = DECL_CHAIN (parm), parm_num++)
	if (map->old_tree == parm)
	  break;
      /* At the moment we assume all old trees to be PARM_DECLs, because we have no
         mechanism to store function local declarations into summaries.  */
      gcc_assert (parm);
      streamer_write_uhwi (ob, parm_num);
      stream_write_tree (ob, map->new_tree, true);
      /* replace_p and ref_p fit in one bit each; pack them together.  */
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, map->replace_p, 1);
      bp_pack_value (&bp, map->ref_p, 1);
      streamer_write_bitpack (&bp);
    }

  /* Edge summaries are streamed only for nodes in the output set; the
     reader walks the same callee/indirect-call lists.  */
  if (cgraph_node_in_set_p (node, set))
    {
      for (e = node->callees; e; e = e->next_callee)
	output_edge_opt_summary (ob, e);
      for (e = node->indirect_calls; e; e = e->next_callee)
	output_edge_opt_summary (ob, e);
    }
}
|
| 1588 |
|
|
|
| 1589 |
|
|
/* Output optimization summaries stored in callgraph.
|
| 1590 |
|
|
At the moment it is the clone info structure. */
|
| 1591 |
|
|
|
| 1592 |
|
|
static void
|
| 1593 |
|
|
output_cgraph_opt_summary (cgraph_node_set set)
|
| 1594 |
|
|
{
|
| 1595 |
|
|
struct cgraph_node *node;
|
| 1596 |
|
|
int i, n_nodes;
|
| 1597 |
|
|
lto_cgraph_encoder_t encoder;
|
| 1598 |
|
|
struct output_block *ob = create_output_block (LTO_section_cgraph_opt_sum);
|
| 1599 |
|
|
unsigned count = 0;
|
| 1600 |
|
|
|
| 1601 |
|
|
ob->cgraph_node = NULL;
|
| 1602 |
|
|
encoder = ob->decl_state->cgraph_node_encoder;
|
| 1603 |
|
|
n_nodes = lto_cgraph_encoder_size (encoder);
|
| 1604 |
|
|
for (i = 0; i < n_nodes; i++)
|
| 1605 |
|
|
if (output_cgraph_opt_summary_p (lto_cgraph_encoder_deref (encoder, i),
|
| 1606 |
|
|
set))
|
| 1607 |
|
|
count++;
|
| 1608 |
|
|
streamer_write_uhwi (ob, count);
|
| 1609 |
|
|
for (i = 0; i < n_nodes; i++)
|
| 1610 |
|
|
{
|
| 1611 |
|
|
node = lto_cgraph_encoder_deref (encoder, i);
|
| 1612 |
|
|
if (output_cgraph_opt_summary_p (node, set))
|
| 1613 |
|
|
{
|
| 1614 |
|
|
streamer_write_uhwi (ob, i);
|
| 1615 |
|
|
output_node_opt_summary (ob, node, set);
|
| 1616 |
|
|
}
|
| 1617 |
|
|
}
|
| 1618 |
|
|
produce_asm (ob, NULL);
|
| 1619 |
|
|
destroy_output_block (ob);
|
| 1620 |
|
|
}
|
| 1621 |
|
|
|
| 1622 |
|
|
/* Input optimisation summary of EDGE.
   Reader-side counterpart of output_edge_opt_summary; no per-edge data
   is streamed yet, so this is an intentionally empty placeholder that
   keeps the reader walking in lockstep with the writer.  */
static void
input_edge_opt_summary (struct cgraph_edge *edge ATTRIBUTE_UNUSED,
			struct lto_input_block *ib_main ATTRIBUTE_UNUSED)
{
  /* Intentionally empty.  */
}
|
| 1629 |
|
|
|
| 1630 |
|
|
/* Input optimisation summary of NODE. */
|
| 1631 |
|
|
|
| 1632 |
|
|
static void
|
| 1633 |
|
|
input_node_opt_summary (struct cgraph_node *node,
|
| 1634 |
|
|
struct lto_input_block *ib_main,
|
| 1635 |
|
|
struct data_in *data_in)
|
| 1636 |
|
|
{
|
| 1637 |
|
|
int i;
|
| 1638 |
|
|
int count;
|
| 1639 |
|
|
int bit;
|
| 1640 |
|
|
struct bitpack_d bp;
|
| 1641 |
|
|
struct cgraph_edge *e;
|
| 1642 |
|
|
|
| 1643 |
|
|
count = streamer_read_uhwi (ib_main);
|
| 1644 |
|
|
if (count)
|
| 1645 |
|
|
node->clone.args_to_skip = BITMAP_GGC_ALLOC ();
|
| 1646 |
|
|
for (i = 0; i < count; i++)
|
| 1647 |
|
|
{
|
| 1648 |
|
|
bit = streamer_read_uhwi (ib_main);
|
| 1649 |
|
|
bitmap_set_bit (node->clone.args_to_skip, bit);
|
| 1650 |
|
|
}
|
| 1651 |
|
|
count = streamer_read_uhwi (ib_main);
|
| 1652 |
|
|
if (count)
|
| 1653 |
|
|
node->clone.combined_args_to_skip = BITMAP_GGC_ALLOC ();
|
| 1654 |
|
|
for (i = 0; i < count; i++)
|
| 1655 |
|
|
{
|
| 1656 |
|
|
bit = streamer_read_uhwi (ib_main);
|
| 1657 |
|
|
bitmap_set_bit (node->clone.combined_args_to_skip, bit);
|
| 1658 |
|
|
}
|
| 1659 |
|
|
count = streamer_read_uhwi (ib_main);
|
| 1660 |
|
|
for (i = 0; i < count; i++)
|
| 1661 |
|
|
{
|
| 1662 |
|
|
int parm_num;
|
| 1663 |
|
|
tree parm;
|
| 1664 |
|
|
struct ipa_replace_map *map = ggc_alloc_ipa_replace_map ();
|
| 1665 |
|
|
|
| 1666 |
|
|
VEC_safe_push (ipa_replace_map_p, gc, node->clone.tree_map, map);
|
| 1667 |
|
|
for (parm_num = 0, parm = DECL_ARGUMENTS (node->decl); parm_num;
|
| 1668 |
|
|
parm = DECL_CHAIN (parm))
|
| 1669 |
|
|
parm_num --;
|
| 1670 |
|
|
map->parm_num = streamer_read_uhwi (ib_main);
|
| 1671 |
|
|
map->old_tree = NULL;
|
| 1672 |
|
|
map->new_tree = stream_read_tree (ib_main, data_in);
|
| 1673 |
|
|
bp = streamer_read_bitpack (ib_main);
|
| 1674 |
|
|
map->replace_p = bp_unpack_value (&bp, 1);
|
| 1675 |
|
|
map->ref_p = bp_unpack_value (&bp, 1);
|
| 1676 |
|
|
}
|
| 1677 |
|
|
for (e = node->callees; e; e = e->next_callee)
|
| 1678 |
|
|
input_edge_opt_summary (e, ib_main);
|
| 1679 |
|
|
for (e = node->indirect_calls; e; e = e->next_callee)
|
| 1680 |
|
|
input_edge_opt_summary (e, ib_main);
|
| 1681 |
|
|
}
|
| 1682 |
|
|
|
| 1683 |
|
|
/* Read section in file FILE_DATA of length LEN with data DATA.
   DATA is a cgraph_opt_sum section image: an lto_function_header
   followed by the cfg, main and string streams at the offsets recorded
   in the header.  NODES maps streamed node references back to
   cgraph_node pointers.  Frees the section data when done.  */

static void
input_cgraph_opt_section (struct lto_file_decl_data *file_data,
			  const char *data, size_t len, VEC (cgraph_node_ptr,
							     heap) * nodes)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  /* Stream layout inside DATA: [header][cfg][main][strings].  */
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  struct lto_input_block ib_main;
  unsigned int i;
  unsigned int count;

  LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
			header->main_size);

  data_in =
    lto_data_in_create (file_data, (const char *) data + string_offset,
			header->string_size, NULL);
  /* Number of per-node summary records, as written by
     output_cgraph_opt_summary.  */
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      /* Each record is prefixed by the node's encoder index; NODES
	 translates it back to the cgraph node.  */
      int ref = streamer_read_uhwi (&ib_main);
      input_node_opt_summary (VEC_index (cgraph_node_ptr, nodes, ref),
			      &ib_main, data_in);
    }
  lto_free_section_data (file_data, LTO_section_cgraph_opt_sum, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}
|
| 1718 |
|
|
|
| 1719 |
|
|
/* Input optimization summary of cgraph. */
|
| 1720 |
|
|
|
| 1721 |
|
|
static void
|
| 1722 |
|
|
input_cgraph_opt_summary (VEC (cgraph_node_ptr, heap) * nodes)
|
| 1723 |
|
|
{
|
| 1724 |
|
|
struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
|
| 1725 |
|
|
struct lto_file_decl_data *file_data;
|
| 1726 |
|
|
unsigned int j = 0;
|
| 1727 |
|
|
|
| 1728 |
|
|
while ((file_data = file_data_vec[j++]))
|
| 1729 |
|
|
{
|
| 1730 |
|
|
size_t len;
|
| 1731 |
|
|
const char *data =
|
| 1732 |
|
|
lto_get_section_data (file_data, LTO_section_cgraph_opt_sum, NULL,
|
| 1733 |
|
|
&len);
|
| 1734 |
|
|
|
| 1735 |
|
|
if (data)
|
| 1736 |
|
|
input_cgraph_opt_section (file_data, data, len, nodes);
|
| 1737 |
|
|
}
|
| 1738 |
|
|
}
|