/* Tree inlining.
   Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
   2012 Free Software Foundation, Inc.
   Contributed by Alexandre Oliva <aoliva@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "tree.h"
#include "tree-inline.h"
#include "flags.h"
#include "params.h"
#include "input.h"
#include "insn-config.h"
#include "hashtab.h"
#include "langhooks.h"
#include "basic-block.h"
#include "tree-iterator.h"
#include "cgraph.h"
#include "intl.h"
#include "tree-mudflap.h"
#include "tree-flow.h"
#include "function.h"
#include "tree-pretty-print.h"
#include "except.h"
#include "debug.h"
#include "pointer-set.h"
#include "ipa-prop.h"
#include "value-prof.h"
#include "tree-pass.h"
#include "target.h"
#include "integrate.h"

#include "rtl.h"        /* FIXME: For asm_str_count.  */

/* I'm not real happy about this, but we need to handle gimple and
   non-gimple trees.  */
#include "gimple.h"

/* Inlining, Cloning, Versioning, Parallelization

   Inlining: a function body is duplicated, but the PARM_DECLs are
   remapped into VAR_DECLs, and non-void RETURN_EXPRs become
   MODIFY_EXPRs that store to a dedicated returned-value variable.
   The duplicated eh_region info of the copy will later be appended
   to the info for the caller; the eh_region info in copied throwing
   statements and RESX statements is adjusted accordingly.

   Cloning: (only in C++) We have one body for a con/de/structor, and
   multiple function decls, each with a unique parameter list.
   Duplicate the body, using the given splay tree; some parameters
   will become constants (like 0 or 1).

   Versioning: a function body is duplicated and the result is a new
   function, rather than being inlined into the blocks of an existing
   function as with inlining.  Some parameters will become constants.

   Parallelization: a region of a function is duplicated, resulting in
   a new function.  Variables may be replaced with complex expressions
   to enable shared variable semantics.

   All of these will simultaneously look up any callgraph edges.  If
   we're going to inline the duplicated function body, and the given
   function has some cloned callgraph nodes (one for each place this
   function will be inlined), those callgraph edges will be duplicated.
   If we're cloning the body, those callgraph edges will be
   updated to point into the new body.  (Note that the original
   callgraph node and edge list will not be altered.)

   See the CALL_EXPR handling case in copy_tree_body_r ().  */

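/* For illustration only (not part of the original source): at the
   source level, the inlining transformation sketched above roughly
   turns

     static int sq (int x) { return x * x; }
     int caller (int a)    { return sq (a + 1); }

   into the equivalent of

     int caller (int a)
     {
       int x.1 = a + 1;           <- PARM_DECL x remapped to a VAR_DECL
       int retval.2 = x.1 * x.1;  <- RETURN_EXPR became a MODIFY_EXPR
       return retval.2;
     }

   where x.1 and retval.2 stand for hypothetical compiler-generated
   temporaries.  */
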
/* To Do:

   o In order to make inlining-on-trees work, we pessimized
     function-local static constants.  In particular, they are now
     always output, even when not addressed.  Fix this by treating
     function-local static constants just like global static
     constants; the back-end already knows not to output them if they
     are not needed.

   o Provide heuristics to clamp inlining of recursive template
     calls?  */


/* Weights that estimate_num_insns uses to estimate the size of the
   produced code.  */

eni_weights eni_size_weights;

/* Weights that estimate_num_insns uses to estimate the time necessary
   to execute the produced code.  */

eni_weights eni_time_weights;

/* Prototypes.  */

static tree declare_return_variable (copy_body_data *, tree, tree, basic_block);
static void remap_block (tree *, copy_body_data *);
static void copy_bind_expr (tree *, int *, copy_body_data *);
static tree mark_local_for_remap_r (tree *, int *, void *);
static void unsave_expr_1 (tree);
static tree unsave_r (tree *, int *, void *);
static void declare_inline_vars (tree, tree);
static void remap_save_expr (tree *, void *, int *);
static void prepend_lexical_block (tree current_block, tree new_block);
static tree copy_decl_to_var (tree, copy_body_data *);
static tree copy_result_decl_to_var (tree, copy_body_data *);
static tree copy_decl_maybe_to_var (tree, copy_body_data *);
static gimple remap_gimple_stmt (gimple, copy_body_data *);
static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);

/* Insert a tree->tree mapping for ID.  Although the name suggests
   that the trees should be variables, it is used for more than that.  */

void
insert_decl_map (copy_body_data *id, tree key, tree value)
{
  *pointer_map_insert (id->decl_map, key) = value;

  /* Always insert an identity map as well.  If we see this same new
     node again, we won't want to duplicate it a second time.  */
  if (key != value)
    *pointer_map_insert (id->decl_map, value) = value;
}

/* Insert a tree->tree mapping for ID.  This is only used for
   variables.  */

static void
insert_debug_decl_map (copy_body_data *id, tree key, tree value)
{
  if (!gimple_in_ssa_p (id->src_cfun))
    return;

  if (!MAY_HAVE_DEBUG_STMTS)
    return;

  if (!target_for_debug_bind (key))
    return;

  gcc_assert (TREE_CODE (key) == PARM_DECL);
  gcc_assert (TREE_CODE (value) == VAR_DECL);

  if (!id->debug_map)
    id->debug_map = pointer_map_create ();

  *pointer_map_insert (id->debug_map, key) = value;
}

/* If nonzero, we're remapping the contents of inlined debug
   statements.  If negative, an error has occurred, such as a
   reference to a variable that isn't available in the inlined
   context.  */
static int processing_debug_stmt = 0;

/* Construct a new SSA name for old NAME.  ID is the inline context.  */

static tree
remap_ssa_name (tree name, copy_body_data *id)
{
  tree new_tree;
  tree *n;

  gcc_assert (TREE_CODE (name) == SSA_NAME);

  n = (tree *) pointer_map_contains (id->decl_map, name);
  if (n)
    return unshare_expr (*n);

  if (processing_debug_stmt)
    {
      if (TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
          && SSA_NAME_IS_DEFAULT_DEF (name)
          && id->entry_bb == NULL
          && single_succ_p (ENTRY_BLOCK_PTR))
        {
          tree vexpr = make_node (DEBUG_EXPR_DECL);
          gimple def_temp;
          gimple_stmt_iterator gsi;
          tree val = SSA_NAME_VAR (name);

          n = (tree *) pointer_map_contains (id->decl_map, val);
          if (n != NULL)
            val = *n;
          if (TREE_CODE (val) != PARM_DECL)
            {
              processing_debug_stmt = -1;
              return name;
            }
          def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
          DECL_ARTIFICIAL (vexpr) = 1;
          TREE_TYPE (vexpr) = TREE_TYPE (name);
          DECL_MODE (vexpr) = DECL_MODE (SSA_NAME_VAR (name));
          gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR));
          gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
          return vexpr;
        }

      processing_debug_stmt = -1;
      return name;
    }

  /* Do not set DEF_STMT yet, as the statement is not copied yet.  We do
     that in copy_bb.  */
  new_tree = remap_decl (SSA_NAME_VAR (name), id);

  /* We might've substituted a constant or another SSA_NAME for
     the variable.

     Replace the SSA name representing the RESULT_DECL by the variable
     during inlining: this saves us from the need to introduce a PHI
     node in the case the return value is only partly initialized.  */
  if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
      && (TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
          || !id->transform_return_to_modify))
    {
      struct ptr_info_def *pi;
      new_tree = make_ssa_name (new_tree, NULL);
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
        = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      TREE_TYPE (new_tree) = TREE_TYPE (SSA_NAME_VAR (new_tree));
      /* At least IPA points-to info can be directly transferred.  */
      if (id->src_cfun->gimple_df
          && id->src_cfun->gimple_df->ipa_pta
          && (pi = SSA_NAME_PTR_INFO (name))
          && !pi->pt.anything)
        {
          struct ptr_info_def *new_pi = get_ptr_info (new_tree);
          new_pi->pt = pi->pt;
        }
      if (gimple_nop_p (SSA_NAME_DEF_STMT (name)))
        {
          /* By inlining a function having an uninitialized variable, we
             might extend its lifetime (the variable might get reused).
             This causes an ICE in the case we end up extending the
             lifetime of an SSA name across an abnormal edge, and it also
             increases register pressure.

             We simply initialize all uninitialized vars to 0, except for
             the case where we are inlining into the very first BB.  We
             could avoid this for all BBs that are not inside strongly
             connected regions of the CFG, but this is expensive to test.  */
          if (id->entry_bb
              && is_gimple_reg (SSA_NAME_VAR (name))
              && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
              && TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL
              && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR, 0)->dest
                  || EDGE_COUNT (id->entry_bb->preds) != 1))
            {
              gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
              gimple init_stmt;
              tree zero = build_zero_cst (TREE_TYPE (new_tree));

              init_stmt = gimple_build_assign (new_tree, zero);
              gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
              SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
            }
          else
            {
              SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
              if (gimple_default_def (id->src_cfun, SSA_NAME_VAR (name))
                  == name)
                set_default_def (SSA_NAME_VAR (new_tree), new_tree);
            }
        }
    }
  else
    insert_decl_map (id, name, new_tree);
  return new_tree;
}

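/* Illustration (not in the original source): the zero-initialization
   above matters when inlining something like

     static int pick (int c) { int v; if (c) v = 9; return v; }

   where `v' may be used uninitialized.  Placing an explicit
   "v_N = 0" in the entry block keeps the copied SSA name from being
   live across an abnormal edge in the caller, which the comment
   above notes would otherwise ICE.  `pick', `c', `v' and `v_N' are
   made-up names.  */
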
/* Remap DECL during the copying of the BLOCK tree for the function.  */

tree
remap_decl (tree decl, copy_body_data *id)
{
  tree *n;

  /* We only remap local variables in the current function.  */

  /* See if we have remapped this declaration.  */

  n = (tree *) pointer_map_contains (id->decl_map, decl);

  if (!n && processing_debug_stmt)
    {
      processing_debug_stmt = -1;
      return decl;
    }

  /* If we didn't already have an equivalent for this declaration,
     create one now.  */
  if (!n)
    {
      /* Make a copy of the variable or label.  */
      tree t = id->copy_decl (decl, id);

      /* Remember it, so that if we encounter this local entity again
         we can reuse this copy.  Do this early because remap_type may
         need this decl for TYPE_STUB_DECL.  */
      insert_decl_map (id, decl, t);

      if (!DECL_P (t))
        return t;

      /* Remap types, if necessary.  */
      TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
      if (TREE_CODE (t) == TYPE_DECL)
        DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);

      /* Remap sizes as necessary.  */
      walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
      walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);

      /* If fields, do likewise for offset and qualifier.  */
      if (TREE_CODE (t) == FIELD_DECL)
        {
          walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
          if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
            walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
        }

      if ((TREE_CODE (t) == VAR_DECL
           || TREE_CODE (t) == RESULT_DECL
           || TREE_CODE (t) == PARM_DECL)
          && id->src_fn && DECL_STRUCT_FUNCTION (id->src_fn)
          && gimple_referenced_vars (DECL_STRUCT_FUNCTION (id->src_fn))
          /* We don't want to mark as referenced VAR_DECLs that were
             not marked as such in the src function.  */
          && (TREE_CODE (decl) != VAR_DECL
              || referenced_var_lookup (DECL_STRUCT_FUNCTION (id->src_fn),
                                        DECL_UID (decl))))
        add_referenced_var (t);
      return t;
    }

  if (id->do_not_unshare)
    return *n;
  else
    return unshare_expr (*n);
}

static tree
remap_type_1 (tree type, copy_body_data *id)
{
  tree new_tree, t;

  /* We do need a copy.  Build and register it now.  If this is a pointer or
     reference type, remap the designated type and make a new pointer or
     reference type.  */
  if (TREE_CODE (type) == POINTER_TYPE)
    {
      new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
                                         TYPE_MODE (type),
                                         TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
        new_tree = build_type_attribute_qual_variant (new_tree,
                                                      TYPE_ATTRIBUTES (type),
                                                      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
                                            TYPE_MODE (type),
                                            TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
        new_tree = build_type_attribute_qual_variant (new_tree,
                                                      TYPE_ATTRIBUTES (type),
                                                      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else
    new_tree = copy_node (type);

  insert_decl_map (id, type, new_tree);

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (type);
  if (type != t)
    {
      t = remap_type (t, id);
      TYPE_MAIN_VARIANT (new_tree) = t;
      TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new_tree;
    }
  else
    {
      TYPE_MAIN_VARIANT (new_tree) = new_tree;
      TYPE_NEXT_VARIANT (new_tree) = NULL;
    }

  if (TYPE_STUB_DECL (type))
    TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);

  /* Lazily create pointer and reference types.  */
  TYPE_POINTER_TO (new_tree) = NULL;
  TYPE_REFERENCE_TO (new_tree) = NULL;

  switch (TREE_CODE (new_tree))
    {
    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      t = TYPE_MIN_VALUE (new_tree);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);

      t = TYPE_MAX_VALUE (new_tree);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case FUNCTION_TYPE:
      TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case ARRAY_TYPE:
      TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
        tree f, nf = NULL;

        for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
          {
            t = remap_decl (f, id);
            DECL_CONTEXT (t) = new_tree;
            DECL_CHAIN (t) = nf;
            nf = t;
          }
        TYPE_FIELDS (new_tree) = nreverse (nf);
      }
      break;

    case OFFSET_TYPE:
    default:
      /* Shouldn't have been thought variable sized.  */
      gcc_unreachable ();
    }

  walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
  walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);

  return new_tree;
}

tree
remap_type (tree type, copy_body_data *id)
{
  tree *node;
  tree tmp;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = (tree *) pointer_map_contains (id->decl_map, type);
  if (node)
    return *node;

  /* The type only needs remapping if it's variably modified.  */
  if (! variably_modified_type_p (type, id->src_fn))
    {
      insert_decl_map (id, type, type);
      return type;
    }

  id->remapping_type_depth++;
  tmp = remap_type_1 (type, id);
  id->remapping_type_depth--;

  return tmp;
}

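/* Illustration (not in the original source): a type is "variably
   modified" when its layout depends on a local entity, e.g. the VLA
   type of `buf' in

     void f (int n) { char buf[n]; ... }

   When f is inlined, `n' is remapped, so char[n] must be remapped to
   a type built from the caller's copy of `n'; an ordinary type such
   as char[16] is mapped to itself by the fast path above.  `f', `n'
   and `buf' are made-up names.  */
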
/* Return the previously remapped type of TYPE in ID.  Return NULL if TYPE
   is NULL or TYPE has not been remapped before.  */

static tree
remapped_type (tree type, copy_body_data *id)
{
  tree *node;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = (tree *) pointer_map_contains (id->decl_map, type);
  if (node)
    return *node;
  else
    return NULL;
}

/* Decide if DECL can be put into BLOCK_NONLOCALIZED_VARS.  */

static bool
can_be_nonlocal (tree decl, copy_body_data *id)
{
  /* We cannot duplicate function decls.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    return true;

  /* Local static vars must be non-local or we get multiple declaration
     problems.  */
  if (TREE_CODE (decl) == VAR_DECL
      && !auto_var_in_fn_p (decl, id->src_fn))
    return true;

  /* At the moment dwarf2out can handle only these types of nodes.  We
     can support more later.  */
  if (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != PARM_DECL)
    return false;

  /* We must use the global type.  We call remapped_type instead of
     remap_type since we don't want to remap this type here if it
     hasn't been remapped before.  */
  if (TREE_TYPE (decl) != remapped_type (TREE_TYPE (decl), id))
    return false;

  /* Without SSA we can't tell if a variable is used.  */
  if (!gimple_in_ssa_p (cfun))
    return false;

  /* Live variables must be copied so we can attach DECL_RTL.  */
  if (var_ann (decl))
    return false;

  return true;
}

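/* Illustration (not in the original source): a function-local static
   such as `calls' in

     int count (void) { static int calls; return ++calls; }

   is not an auto var of the source function, so can_be_nonlocal
   returns true for it: every inlined copy of count must keep
   referring to the one original declaration rather than receiving a
   private duplicate.  `count' and `calls' are made-up names.  */
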
static tree
remap_decls (tree decls, VEC(tree,gc) **nonlocalized_list, copy_body_data *id)
{
  tree old_var;
  tree new_decls = NULL_TREE;

  /* Remap its variables.  */
  for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
    {
      tree new_var;

      if (can_be_nonlocal (old_var, id))
        {
          if (TREE_CODE (old_var) == VAR_DECL
              && ! DECL_EXTERNAL (old_var)
              && (var_ann (old_var) || !gimple_in_ssa_p (cfun)))
            add_local_decl (cfun, old_var);
          if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
              && !DECL_IGNORED_P (old_var)
              && nonlocalized_list)
            VEC_safe_push (tree, gc, *nonlocalized_list, old_var);
          continue;
        }

      /* Remap the variable.  */
      new_var = remap_decl (old_var, id);

      /* If we didn't remap this variable, we can't mess with its
         TREE_CHAIN.  If we remapped this variable to the return slot, it's
         already declared somewhere else, so don't declare it here.  */

      if (new_var == id->retvar)
        ;
      else if (!new_var)
        {
          if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
              && !DECL_IGNORED_P (old_var)
              && nonlocalized_list)
            VEC_safe_push (tree, gc, *nonlocalized_list, old_var);
        }
      else
        {
          gcc_assert (DECL_P (new_var));
          DECL_CHAIN (new_var) = new_decls;
          new_decls = new_var;

          /* Also copy value-expressions.  */
          if (TREE_CODE (new_var) == VAR_DECL
              && DECL_HAS_VALUE_EXPR_P (new_var))
            {
              tree tem = DECL_VALUE_EXPR (new_var);
              bool old_regimplify = id->regimplify;
              id->remapping_type_depth++;
              walk_tree (&tem, copy_tree_body_r, id, NULL);
              id->remapping_type_depth--;
              id->regimplify = old_regimplify;
              SET_DECL_VALUE_EXPR (new_var, tem);
            }
        }
    }

  return nreverse (new_decls);
}

/* Copy the BLOCK to contain remapped versions of the variables
   therein, and hook the new block into the block-tree.  */

static void
remap_block (tree *block, copy_body_data *id)
{
  tree old_block;
  tree new_block;

  /* Make the new block.  */
  old_block = *block;
  new_block = make_node (BLOCK);
  TREE_USED (new_block) = TREE_USED (old_block);
  BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
  BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
  BLOCK_NONLOCALIZED_VARS (new_block)
    = VEC_copy (tree, gc, BLOCK_NONLOCALIZED_VARS (old_block));
  *block = new_block;

  /* Remap its variables.  */
  BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
                                        &BLOCK_NONLOCALIZED_VARS (new_block),
                                        id);

  if (id->transform_lang_insert_block)
    id->transform_lang_insert_block (new_block);

  /* Remember the remapped block.  */
  insert_decl_map (id, old_block, new_block);
}

/* Copy the whole block tree and root it in id->block.  */
static tree
remap_blocks (tree block, copy_body_data *id)
{
  tree t;
  tree new_tree = block;

  if (!block)
    return NULL;

  remap_block (&new_tree, id);
  gcc_assert (new_tree != block);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    prepend_lexical_block (new_tree, remap_blocks (t, id));
  /* Blocks are in arbitrary order, but make things slightly prettier and do
     not swap order when producing a copy.  */
  BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
  return new_tree;
}

static void
copy_statement_list (tree *tp)
{
  tree_stmt_iterator oi, ni;
  tree new_tree;

  new_tree = alloc_stmt_list ();
  ni = tsi_start (new_tree);
  oi = tsi_start (*tp);
  TREE_TYPE (new_tree) = TREE_TYPE (*tp);
  *tp = new_tree;

  for (; !tsi_end_p (oi); tsi_next (&oi))
    {
      tree stmt = tsi_stmt (oi);
      if (TREE_CODE (stmt) == STATEMENT_LIST)
        /* This copy is not redundant; tsi_link_after will smash this
           STATEMENT_LIST into the end of the one we're building, and we
           don't want to do that with the original.  */
        copy_statement_list (&stmt);
      tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
    }
}

static void
copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
{
  tree block = BIND_EXPR_BLOCK (*tp);
  /* Copy (and replace) the statement.  */
  copy_tree_r (tp, walk_subtrees, NULL);
  if (block)
    {
      remap_block (&block, id);
      BIND_EXPR_BLOCK (*tp) = block;
    }

  if (BIND_EXPR_VARS (*tp))
    /* This will remap a lot of the same decls again, but this should be
       harmless.  */
    BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
}


/* Create a new gimple_seq by remapping all the statements in BODY
   using the inlining information in ID.  */

static gimple_seq
remap_gimple_seq (gimple_seq body, copy_body_data *id)
{
  gimple_stmt_iterator si;
  gimple_seq new_body = NULL;

  for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
    {
      gimple new_stmt = remap_gimple_stmt (gsi_stmt (si), id);
      gimple_seq_add_stmt (&new_body, new_stmt);
    }

  return new_body;
}


/* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
   block using the mapping information in ID.  */

static gimple
copy_gimple_bind (gimple stmt, copy_body_data *id)
{
  gimple new_bind;
  tree new_block, new_vars;
  gimple_seq body, new_body;

  /* Copy the statement.  Note that we purposely don't use copy_stmt
     here because we need to remap statements as we copy.  */
  body = gimple_bind_body (stmt);
  new_body = remap_gimple_seq (body, id);

  new_block = gimple_bind_block (stmt);
  if (new_block)
    remap_block (&new_block, id);

  /* This will remap a lot of the same decls again, but this should be
     harmless.  */
  new_vars = gimple_bind_vars (stmt);
  if (new_vars)
    new_vars = remap_decls (new_vars, NULL, id);

  new_bind = gimple_build_bind (new_vars, new_body, new_block);

  return new_bind;
}


/* Remap the GIMPLE operand pointed to by *TP.  DATA is really a
   'struct walk_stmt_info *'.  DATA->INFO is a 'copy_body_data *'.
   WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
   recursing into the child nodes of *TP.  */

static tree
remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
  copy_body_data *id = (copy_body_data *) wi_p->info;
  tree fn = id->src_fn;

  if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }
  else if (auto_var_in_fn_p (*tp, fn))
    {
      /* Local variables and labels need to be replaced by equivalent
         variables.  We don't want to copy static variables; there's
         only one of those, no matter how many times we inline the
         containing function.  Similarly for globals from an outer
         function.  */
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      /* ???  The C++ frontend uses void * pointer zero to initialize
         any other type.  This confuses the middle-end type verification.
         As cloned bodies do not go through gimplification again the fixup
         there doesn't trigger.  */
      if (TREE_CODE (new_decl) == INTEGER_CST
          && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
        new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == LABEL_DECL
           && (!DECL_CONTEXT (*tp)
               || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TYPE_P (*tp))
    /* Types may need remapping as well.  */
    *tp = remap_type (*tp, id);
  else if (CONSTANT_CLASS_P (*tp))
    {
      /* If this is a constant, we have to copy the node iff the type
         will be remapped.  copy_tree_r will not copy a constant.  */
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
        *walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
        *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
                                  TREE_INT_CST_HIGH (*tp));
      else
        {
          *tp = copy_node (*tp);
          TREE_TYPE (*tp) = new_type;
        }
    }
  else
    {
      /* Otherwise, just copy the node.  Note that copy_tree_r already
         knows not to copy VAR_DECLs, etc., so this is safe.  */

      /* We should never have TREE_BLOCK set on non-statements.  */
      if (EXPR_P (*tp))
        gcc_assert (!TREE_BLOCK (*tp));

      if (TREE_CODE (*tp) == MEM_REF)
        {
          tree ptr = TREE_OPERAND (*tp, 0);
          tree type = remap_type (TREE_TYPE (*tp), id);
          tree old = *tp;

          /* We need to re-canonicalize MEM_REFs from inline substitutions
             that can happen when a pointer argument is an ADDR_EXPR.
             Recurse here manually to allow that.  */
          walk_tree (&ptr, remap_gimple_op_r, data, NULL);
          *tp = fold_build2 (MEM_REF, type,
                             ptr, TREE_OPERAND (*tp, 1));
          TREE_THIS_NOTRAP (*tp) = TREE_THIS_NOTRAP (old);
          TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
          TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
          *walk_subtrees = 0;
          return NULL;
        }

      /* Here is the "usual case".  Copy this tree node, and then
         tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      if (TREE_CODE (*tp) != OMP_CLAUSE)
        TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      /* Global variables we haven't seen yet need to go into referenced
         vars, unless they are referenced only from types.  */
      if (gimple_in_ssa_p (cfun)
          && TREE_CODE (*tp) == VAR_DECL
          && id->remapping_type_depth == 0
          && !processing_debug_stmt)
        add_referenced_var (*tp);

      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
        {
          /* The copied TARGET_EXPR has never been expanded, even if the
             original node was expanded already.  */
          TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
          TREE_OPERAND (*tp, 3) = NULL_TREE;
        }
      else if (TREE_CODE (*tp) == ADDR_EXPR)
        {
          /* Variable substitution need not be simple.  In particular,
             the MEM_REF substitution above.  Make sure that
             TREE_CONSTANT and friends are up-to-date.  But make sure
             to not improperly set TREE_BLOCK on some sub-expressions.  */
          int invariant = is_gimple_min_invariant (*tp);
          tree block = id->block;
          id->block = NULL_TREE;
          walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
          id->block = block;
          recompute_tree_invariant_for_addr_expr (*tp);

          /* If this used to be invariant, but is not any longer,
             then regimplification is probably needed.  */
          if (invariant && !is_gimple_min_invariant (*tp))
            id->regimplify = true;

          *walk_subtrees = 0;
        }
    }

  /* Keep iterating.  */
  return NULL_TREE;
}


/* Called from copy_body_id via walk_tree.  DATA is really a
   `copy_body_data *'.  */

tree
copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
{
  copy_body_data *id = (copy_body_data *) data;
  tree fn = id->src_fn;
  tree new_block;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, RETURN_EXPRs should be transformed to just the
     contained MODIFY_EXPR.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than a statement.  */
  if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
    {
      tree assignment = TREE_OPERAND (*tp, 0);

      /* If we're returning something, just turn that into an
         assignment into the equivalent of the original RESULT_DECL.
         If the "assignment" is just the result decl, the result
         decl has already been set (e.g. a recent "foo (&result_decl,
         ...)"); just toss the entire RETURN_EXPR.  */
      if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
        {
          /* Replace the RETURN_EXPR with (a copy of) the
             MODIFY_EXPR hanging underneath.  */
          *tp = copy_node (assignment);
        }
      else /* Else the RETURN_EXPR returns no value.  */
        {
          *tp = NULL;
          return (tree) (void *)1;
        }
    }
  else if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Local variables and labels need to be replaced by equivalent
     variables.  We don't want to copy static variables; there's only
     one of those, no matter how many times we inline the containing
     function.  Similarly for globals from an outer function.  */
  else if (auto_var_in_fn_p (*tp, fn))
    {
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    copy_statement_list (tp);
  else if (TREE_CODE (*tp) == SAVE_EXPR
           || TREE_CODE (*tp) == TARGET_EXPR)
    remap_save_expr (tp, id->decl_map, walk_subtrees);
  else if (TREE_CODE (*tp) == LABEL_DECL
           && (! DECL_CONTEXT (*tp)
               || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  /* Types may need remapping as well.  */
  else if (TYPE_P (*tp))
    *tp = remap_type (*tp, id);

  /* If this is a constant, we have to copy the node iff the type will be
     remapped.  copy_tree_r will not copy a constant.  */
  else if (CONSTANT_CLASS_P (*tp))
    {
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
        *walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
        *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
                                  TREE_INT_CST_HIGH (*tp));
      else
        {
          *tp = copy_node (*tp);
          TREE_TYPE (*tp) = new_type;
        }
    }

  /* Otherwise, just copy the node.  Note that copy_tree_r already
     knows not to copy VAR_DECLs, etc., so this is safe.  */
  else
    {
      /* Here we handle trees that are not completely rewritten.
         First we detect some inlining-induced bogosities for
         discarding.  */
      if (TREE_CODE (*tp) == MODIFY_EXPR
          && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
          && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
        {
          /* Some assignments VAR = VAR; don't generate any rtl code
             and thus don't count as variable modification.  Avoid
             keeping bogosities like 0 = 0.  */
          tree decl = TREE_OPERAND (*tp, 0), value;
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              value = *n;
              STRIP_TYPE_NOPS (value);
              if (TREE_CONSTANT (value) || TREE_READONLY (value))
                {
                  *tp = build_empty_stmt (EXPR_LOCATION (*tp));
                  return copy_tree_body_r (tp, walk_subtrees, data);
                }
            }
        }
      else if (TREE_CODE (*tp) == INDIRECT_REF)
        {
          /* Get rid of *& from inline substitutions that can happen when a
             pointer argument is an ADDR_EXPR.  */
          tree decl = TREE_OPERAND (*tp, 0);
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              tree new_tree;
              tree old;
              /* If we happen to get an ADDR_EXPR in n->value, strip
                 it manually here as we'll eventually get ADDR_EXPRs
                 which lie about their types pointed to.  In this case
                 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
                 but we absolutely rely on that.  As fold_indirect_ref
                 does other useful transformations, try that first, though.  */
              tree type = TREE_TYPE (TREE_TYPE (*n));
              if (id->do_not_unshare)
                new_tree = *n;
              else
                new_tree = unshare_expr (*n);
              old = *tp;
              *tp = gimple_fold_indirect_ref (new_tree);
              if (! *tp)
                {
                  if (TREE_CODE (new_tree) == ADDR_EXPR)
                    {
                      *tp = fold_indirect_ref_1 (EXPR_LOCATION (new_tree),
                                                 type, new_tree);
                      /* ???  We should either assert here or build
                         a VIEW_CONVERT_EXPR instead of blindly leaking
                         incompatible types to our IL.  */
                      if (! *tp)
                        *tp = TREE_OPERAND (new_tree, 0);
                    }
                  else
                    {
                      *tp = build1 (INDIRECT_REF, type, new_tree);
                      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
                      TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
                      TREE_READONLY (*tp) = TREE_READONLY (old);
                      TREE_THIS_NOTRAP (*tp) = TREE_THIS_NOTRAP (old);
                    }
                }
              *walk_subtrees = 0;
              return NULL;
            }
        }
      else if (TREE_CODE (*tp) == MEM_REF)
        {
          /* We need to re-canonicalize MEM_REFs from inline substitutions
             that can happen when a pointer argument is an ADDR_EXPR.  */
          tree decl = TREE_OPERAND (*tp, 0);
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              tree old = *tp;
              *tp = fold_build2 (MEM_REF, TREE_TYPE (*tp),
                                 unshare_expr (*n), TREE_OPERAND (*tp, 1));
              TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
              TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
              *walk_subtrees = 0;
              return NULL;
            }
        }

      /* Here is the "usual case".  Copy this tree node, and then
         tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      /* Global variables we haven't seen yet need to go into referenced
         vars, unless they are referenced only from types or debug stmts.  */
      if (gimple_in_ssa_p (cfun)
          && TREE_CODE (*tp) == VAR_DECL
          && id->remapping_type_depth == 0
          && !processing_debug_stmt)
        add_referenced_var (*tp);

      /* If EXPR has a block defined, map it to the newly constructed block.
         When inlining we want EXPRs without a block to appear in the block
         of the function call if we are not remapping a type.  */
      if (EXPR_P (*tp))
        {
          new_block = id->remapping_type_depth == 0 ? id->block : NULL;
          if (TREE_BLOCK (*tp))
            {
              tree *n;
              n = (tree *) pointer_map_contains (id->decl_map,
                                                 TREE_BLOCK (*tp));
              gcc_assert (n || id->remapping_type_depth != 0);
              if (n)
                new_block = *n;
            }
          TREE_BLOCK (*tp) = new_block;
        }

      if (TREE_CODE (*tp) != OMP_CLAUSE)
        TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      /* The copied TARGET_EXPR has never been expanded, even if the
         original node was expanded already.  */
      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
        {
          TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
          TREE_OPERAND (*tp, 3) = NULL_TREE;
        }

      /* Variable substitution need not be simple.  In particular, the
         INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
         and friends are up-to-date.  */
      else if (TREE_CODE (*tp) == ADDR_EXPR)
        {
          int invariant = is_gimple_min_invariant (*tp);
          walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);

          /* Handle the case where we substituted an INDIRECT_REF
             into the operand of the ADDR_EXPR.  */
          if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
            *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
          else
            recompute_tree_invariant_for_addr_expr (*tp);

          /* If this used to be invariant, but is not any longer,
             then regimplification is probably needed.  */
          if (invariant && !is_gimple_min_invariant (*tp))
            id->regimplify = true;

          *walk_subtrees = 0;
        }
    }

  /* Keep iterating.  */
  return NULL_TREE;
}

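/* Illustration (not in the original source): the INDIRECT_REF and
   MEM_REF re-canonicalization above is what turns *& chains from
   argument substitution back into plain accesses.  Inlining

     static void set (int *p) { *p = 1; }
     void caller (void) { int x; set (&x); }

   first maps p to &x, so *p becomes *&x; folding then reduces it to
   the direct store x = 1.  `set', `caller', `p' and `x' are made-up
   names.  */
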
/* Helper for remap_gimple_stmt.  Given an EH region number for the
   source function, map that to the duplicate EH region number in
   the destination function.  */

static int
remap_eh_region_nr (int old_nr, copy_body_data *id)
{
  eh_region old_r, new_r;
  void **slot;

  old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
  slot = pointer_map_contains (id->eh_map, old_r);
  new_r = (eh_region) *slot;

  return new_r->index;
}

/* Similar, but operate on INTEGER_CSTs.  */

static tree
remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
{
  int old_nr, new_nr;

  old_nr = tree_low_cst (old_t_nr, 0);
  new_nr = remap_eh_region_nr (old_nr, id);

  return build_int_cst (integer_type_node, new_nr);
}

/* Helper for copy_bb.  Remap statement STMT using the inlining
   information in ID.  Return the new statement copy.  */

static gimple
remap_gimple_stmt (gimple stmt, copy_body_data *id)
{
  gimple copy = NULL;
  struct walk_stmt_info wi;
  tree new_block;
  bool skip_first = false;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, GIMPLE_RETURNs should be transformed to just the
     contained GIMPLE_ASSIGN.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than the
     statement.  */
  if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
    {
      tree retval = gimple_return_retval (stmt);

      /* If we're returning something, just turn that into an
         assignment into the equivalent of the original RESULT_DECL.
         If RETVAL is just the result decl, the result decl has
         already been set (e.g. a recent "foo (&result_decl, ...)");
         just toss the entire GIMPLE_RETURN.  */
      if (retval
          && (TREE_CODE (retval) != RESULT_DECL
              && (TREE_CODE (retval) != SSA_NAME
                  || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
        {
          copy = gimple_build_assign (id->retvar, retval);
          /* id->retvar is already substituted.  Skip it on later remapping.  */
          skip_first = true;
        }
      else
        return gimple_build_nop ();
    }
  else if (gimple_has_substatements (stmt))
    {
      gimple_seq s1, s2;

      /* When cloning bodies from the C++ front end, we will be handed bodies
         in High GIMPLE form.  Handle here all the High GIMPLE statements that
         have embedded statements.  */
      switch (gimple_code (stmt))
        {
        case GIMPLE_BIND:
          copy = copy_gimple_bind (stmt, id);
          break;

        case GIMPLE_CATCH:
          s1 = remap_gimple_seq (gimple_catch_handler (stmt), id);
          copy = gimple_build_catch (gimple_catch_types (stmt), s1);
          break;

        case GIMPLE_EH_FILTER:
          s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
          copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
          break;

        case GIMPLE_TRY:
          s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
          s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
          copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
          break;

        case GIMPLE_WITH_CLEANUP_EXPR:
          s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
          copy = gimple_build_wce (s1);
          break;

        case GIMPLE_OMP_PARALLEL:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_parallel
                   (s1,
                    gimple_omp_parallel_clauses (stmt),
                    gimple_omp_parallel_child_fn (stmt),
                    gimple_omp_parallel_data_arg (stmt));
          break;

        case GIMPLE_OMP_TASK:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_task
                   (s1,
                    gimple_omp_task_clauses (stmt),
                    gimple_omp_task_child_fn (stmt),
                    gimple_omp_task_data_arg (stmt),
                    gimple_omp_task_copy_fn (stmt),
                    gimple_omp_task_arg_size (stmt),
                    gimple_omp_task_arg_align (stmt));
          break;

        case GIMPLE_OMP_FOR:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
          copy = gimple_build_omp_for (s1, gimple_omp_for_clauses (stmt),
                                       gimple_omp_for_collapse (stmt), s2);
          {
            size_t i;
            for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
              {
                gimple_omp_for_set_index (copy, i,
                                          gimple_omp_for_index (stmt, i));
                gimple_omp_for_set_initial (copy, i,
                                            gimple_omp_for_initial (stmt, i));
                gimple_omp_for_set_final (copy, i,
                                          gimple_omp_for_final (stmt, i));
                gimple_omp_for_set_incr (copy, i,
                                         gimple_omp_for_incr (stmt, i));
                gimple_omp_for_set_cond (copy, i,
                                         gimple_omp_for_cond (stmt, i));
              }
          }
          break;

        case GIMPLE_OMP_MASTER:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_master (s1);
          break;

        case GIMPLE_OMP_ORDERED:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_ordered (s1);
          break;

        case GIMPLE_OMP_SECTION:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_section (s1);
          break;

        case GIMPLE_OMP_SECTIONS:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_sections
                   (s1, gimple_omp_sections_clauses (stmt));
          break;

        case GIMPLE_OMP_SINGLE:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_single
                   (s1, gimple_omp_single_clauses (stmt));
          break;

        case GIMPLE_OMP_CRITICAL:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy
            = gimple_build_omp_critical (s1, gimple_omp_critical_name (stmt));
          break;

        case GIMPLE_TRANSACTION:
          s1 = remap_gimple_seq (gimple_transaction_body (stmt), id);
          copy = gimple_build_transaction (s1, gimple_transaction_label (stmt));
          gimple_transaction_set_subcode (copy, gimple_transaction_subcode (stmt));
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    {
      if (gimple_assign_copy_p (stmt)
          && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
          && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
        {
          /* Here we handle statements that are not completely rewritten.
             First we detect some inlining-induced bogosities for
             discarding.  */

          /* Some assignments VAR = VAR; don't generate any rtl code
             and thus don't count as variable modification.  Avoid
             keeping bogosities like 0 = 0.  */
          tree decl = gimple_assign_lhs (stmt), value;
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              value = *n;
              STRIP_TYPE_NOPS (value);
              if (TREE_CONSTANT (value) || TREE_READONLY (value))
                return gimple_build_nop ();
            }
        }

      if (gimple_debug_bind_p (stmt))
        {
          copy = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
                                          gimple_debug_bind_get_value (stmt),
                                          stmt);
          VEC_safe_push (gimple, heap, id->debug_stmts, copy);
          return copy;
        }
      if (gimple_debug_source_bind_p (stmt))
        {
          copy = gimple_build_debug_source_bind
                   (gimple_debug_source_bind_get_var (stmt),
                    gimple_debug_source_bind_get_value (stmt), stmt);
          VEC_safe_push (gimple, heap, id->debug_stmts, copy);
          return copy;
        }

      /* Create a new deep copy of the statement.  */
      copy = gimple_copy (stmt);

      /* Remap the region numbers for __builtin_eh_{pointer,filter},
         RESX and EH_DISPATCH.  */
      if (id->eh_map)
        switch (gimple_code (copy))
          {
          case GIMPLE_CALL:
            {
              tree r, fndecl = gimple_call_fndecl (copy);
              if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
                switch (DECL_FUNCTION_CODE (fndecl))
                  {
                  case BUILT_IN_EH_COPY_VALUES:
                    r = gimple_call_arg (copy, 1);
                    r = remap_eh_region_tree_nr (r, id);
                    gimple_call_set_arg (copy, 1, r);
                    /* FALLTHRU */

                  case BUILT_IN_EH_POINTER:
                  case BUILT_IN_EH_FILTER:
                    r = gimple_call_arg (copy, 0);
                    r = remap_eh_region_tree_nr (r, id);
                    gimple_call_set_arg (copy, 0, r);
                    break;

                  default:
                    break;
                  }

              /* Reset alias info if we didn't apply measures to
                 keep it valid over inlining by setting DECL_PT_UID.  */
              if (!id->src_cfun->gimple_df
                  || !id->src_cfun->gimple_df->ipa_pta)
                gimple_call_reset_alias_info (copy);
            }
            break;

          case GIMPLE_RESX:
            {
              int r = gimple_resx_region (copy);
              r = remap_eh_region_nr (r, id);
              gimple_resx_set_region (copy, r);
            }
            break;

          case GIMPLE_EH_DISPATCH:
            {
              int r = gimple_eh_dispatch_region (copy);
              r = remap_eh_region_nr (r, id);
              gimple_eh_dispatch_set_region (copy, r);
            }
            break;

          default:
            break;
          }
    }

  /* If STMT has a block defined, map it to the newly constructed
     block.  When inlining we want statements without a block to
     appear in the block of the function call.  */
  new_block = id->block;
  if (gimple_block (copy))
    {
      tree *n;
      n = (tree *) pointer_map_contains (id->decl_map, gimple_block (copy));
      gcc_assert (n);
      new_block = *n;
    }

  gimple_set_block (copy, new_block);

  if (gimple_debug_bind_p (copy) || gimple_debug_source_bind_p (copy))
    return copy;

  /* Remap all the operands in COPY.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = id;
  if (skip_first)
    walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
  else
    walk_gimple_op (copy, remap_gimple_op_r, &wi);

  /* Clear the copied virtual operands.  We are not remapping them here
     but are going to recreate them from scratch.  */
  if (gimple_has_mem_ops (copy))
    {
      gimple_set_vdef (copy, NULL_TREE);
      gimple_set_vuse (copy, NULL_TREE);
    }

  return copy;
}


/* Copy basic block, scale profile accordingly.  Edges will be taken care of
   later.  */

static basic_block
copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
         gcov_type count_scale)
{
  gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
  basic_block copy_basic_block;
  tree decl;
  gcov_type freq;
  basic_block prev;

  /* Search for previous copied basic block.  */
  prev = bb->prev_bb;
  while (!prev->aux)
    prev = prev->prev_bb;

  /* create_basic_block() will append every new block to
     basic_block_info automatically.  */
  copy_basic_block = create_basic_block (NULL, (void *) 0,
                                         (basic_block) prev->aux);
  copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE;

  /* We are going to rebuild frequencies from scratch.  These values
     are of only minor importance for driving canonicalize_loop_headers.  */
  freq = ((gcov_type)bb->frequency * frequency_scale / REG_BR_PROB_BASE);

  /* We recompute frequencies after inlining, so this is quite safe.  */
  if (freq > BB_FREQ_MAX)
    freq = BB_FREQ_MAX;
  copy_basic_block->frequency = freq;

  copy_gsi = gsi_start_bb (copy_basic_block);

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      gimple orig_stmt = stmt;

      id->regimplify = false;
      stmt = remap_gimple_stmt (stmt, id);
      if (gimple_nop_p (stmt))
        continue;

      gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt);
      seq_gsi = copy_gsi;

      /* With return slot optimization we can end up with
         non-gimple (foo *)&this->m, fix that here.  */
      if (is_gimple_assign (stmt)
          && gimple_assign_rhs_code (stmt) == NOP_EXPR
          && !is_gimple_val (gimple_assign_rhs1 (stmt)))
        {
          tree new_rhs;
          new_rhs = force_gimple_operand_gsi (&seq_gsi,
                                              gimple_assign_rhs1 (stmt),
                                              true, NULL, false,
                                              GSI_CONTINUE_LINKING);
          gimple_assign_set_rhs1 (stmt, new_rhs);
          id->regimplify = false;
        }

      gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);

      if (id->regimplify)
        gimple_regimplify_operands (stmt, &seq_gsi);

      /* If copy_basic_block has been empty at the start of this iteration,
         call gsi_start_bb again to get at the newly added statements.  */
      if (gsi_end_p (copy_gsi))
        copy_gsi = gsi_start_bb (copy_basic_block);
      else
        gsi_next (&copy_gsi);

      /* Process the new statement.  The call to gimple_regimplify_operands
         possibly turned the statement into multiple statements, we
         need to process all of them.  */
      do
        {
          tree fn;

          stmt = gsi_stmt (copy_gsi);
          if (is_gimple_call (stmt)
              && gimple_call_va_arg_pack_p (stmt)
              && id->gimple_call)
            {
              /* __builtin_va_arg_pack () should be replaced by
                 all arguments corresponding to ... in the caller.  */
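              /* (For example, if f (int x, ...) contains the call
                 g (x, __builtin_va_arg_pack ()) and f is inlined into
                 the call f (1, 2, 3), the inner call is rewritten
                 here as g (1, 2, 3).)  */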
              tree p;
              gimple new_call;
              VEC(tree, heap) *argarray;
              size_t nargs = gimple_call_num_args (id->gimple_call);
              size_t n;

              for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
                nargs--;

              /* Create the new array of arguments.  */
              n = nargs + gimple_call_num_args (stmt);
              argarray = VEC_alloc (tree, heap, n);
              VEC_safe_grow (tree, heap, argarray, n);

              /* Copy all the arguments before '...'  */
              memcpy (VEC_address (tree, argarray),
                      gimple_call_arg_ptr (stmt, 0),
                      gimple_call_num_args (stmt) * sizeof (tree));

              /* Append the arguments passed in '...'  */
              memcpy (VEC_address (tree, argarray) + gimple_call_num_args (stmt),
                      gimple_call_arg_ptr (id->gimple_call, 0)
                        + (gimple_call_num_args (id->gimple_call) - nargs),
                      nargs * sizeof (tree));

              new_call = gimple_build_call_vec (gimple_call_fn (stmt),
                                                argarray);

              VEC_free (tree, heap, argarray);

              /* Copy all GIMPLE_CALL flags, location and block, except
                 GF_CALL_VA_ARG_PACK.  */
              gimple_call_copy_flags (new_call, stmt);
              gimple_call_set_va_arg_pack (new_call, false);
              gimple_set_location (new_call, gimple_location (stmt));
              gimple_set_block (new_call, gimple_block (stmt));
              gimple_call_set_lhs (new_call, gimple_call_lhs (stmt));

              gsi_replace (&copy_gsi, new_call, false);
              stmt = new_call;
            }
          else if (is_gimple_call (stmt)
                   && id->gimple_call
                   && (decl = gimple_call_fndecl (stmt))
                   && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
                   && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
            {
              /* __builtin_va_arg_pack_len () should be replaced by
                 the number of anonymous arguments.  */
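              /* (In the f (1, 2, 3) example above, with one named
                 parameter, nargs ends up as 2, so the builtin is
                 replaced by the constant 2.)  */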
              size_t nargs = gimple_call_num_args (id->gimple_call);
              tree count, p;
              gimple new_stmt;

              for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
                nargs--;

              count = build_int_cst (integer_type_node, nargs);
              new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
              gsi_replace (&copy_gsi, new_stmt, false);
              stmt = new_stmt;
            }

          /* Statements produced by inlining can be unfolded, especially
             when we constant propagated some operands.  We can't fold
             them right now for two reasons:
             1) folding requires SSA_NAME_DEF_STMTs to be correct
             2) we can't change function calls to builtins.
             So we just mark the statement for later folding.  We mark
             all new statements, instead of just the statements changed
             by some nontrivial substitution, so that even statements made
             foldable indirectly are updated.  If this turns out to be
             expensive, copy_body can be told to watch for nontrivial
             changes.  */
          if (id->statements_to_fold)
            pointer_set_insert (id->statements_to_fold, stmt);

          /* We're duplicating a CALL_EXPR.  Find any corresponding
             callgraph edges and update or duplicate them.  */
          if (is_gimple_call (stmt))
            {
              struct cgraph_edge *edge;
              int flags;

              switch (id->transform_call_graph_edges)
                {
                case CB_CGE_DUPLICATE:
                  edge = cgraph_edge (id->src_node, orig_stmt);
                  if (edge)
                    {
                      int edge_freq = edge->frequency;
                      edge = cgraph_clone_edge (edge, id->dst_node, stmt,
                                                gimple_uid (stmt),
                                                REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
                                                true);
                      /* We could also just rescale the frequency, but
                         doing so would introduce roundoff errors and make
                         the verifier unhappy.  */
                      edge->frequency
                        = compute_call_stmt_bb_frequency (id->dst_node->decl,
                                                          copy_basic_block);
                      if (dump_file
                          && profile_status_for_function (cfun) != PROFILE_ABSENT
                          && (edge_freq > edge->frequency + 10
                              || edge_freq < edge->frequency - 10))
                        {
                          fprintf (dump_file, "Edge frequency estimated by "
                                   "cgraph %i diverges from inliner's estimate %i\n",
                                   edge_freq,
                                   edge->frequency);
                          fprintf (dump_file,
                                   "Orig bb: %i, orig bb freq %i, new bb freq %i\n",
                                   bb->index,
                                   bb->frequency,
                                   copy_basic_block->frequency);
                        }
                      stmt = cgraph_redirect_edge_call_stmt_to_callee (edge);
                    }
                  break;

                case CB_CGE_MOVE_CLONES:
                  cgraph_set_call_stmt_including_clones (id->dst_node,
                                                         orig_stmt, stmt);
                  edge = cgraph_edge (id->dst_node, stmt);
                  break;

                case CB_CGE_MOVE:
                  edge = cgraph_edge (id->dst_node, orig_stmt);
                  if (edge)
                    cgraph_set_call_stmt (edge, stmt);
                  break;

                default:
                  gcc_unreachable ();
                }

              /* Constant propagation on arguments done during inlining
                 may create new direct calls.  Produce an edge for them.  */
              if ((!edge
                   || (edge->indirect_inlining_edge
                       && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
                  && id->dst_node->analyzed
                  && (fn = gimple_call_fndecl (stmt)) != NULL)
                {
                  struct cgraph_node *dest = cgraph_get_node (fn);

                  /* We have a missing edge in the callgraph.  This can happen
                     when previous inlining turned an indirect call into a
                     direct call by constant propagating arguments or we are
                     producing a dead clone (for further cloning).  In all
                     other cases we hit a bug (incorrect node sharing is the
                     most common reason for missing edges).  */
                  gcc_assert (dest->needed || !dest->analyzed
                              || dest->address_taken
                              || !id->src_node->analyzed
                              || !id->dst_node->analyzed);
                  if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
                    cgraph_create_edge_including_clones
                      (id->dst_node, dest, orig_stmt, stmt, bb->count,
                       compute_call_stmt_bb_frequency (id->dst_node->decl,
                                                       copy_basic_block),
                       CIF_ORIGINALLY_INDIRECT_CALL);
                  else
                    cgraph_create_edge (id->dst_node, dest, stmt,
                                        bb->count,
                                        compute_call_stmt_bb_frequency
                                          (id->dst_node->decl, copy_basic_block))->inline_failed
                      = CIF_ORIGINALLY_INDIRECT_CALL;
                  if (dump_file)
                    {
                      fprintf (dump_file, "Created new direct edge to %s\n",
                               cgraph_node_name (dest));
                    }
                }

              flags = gimple_call_flags (stmt);
              if (flags & ECF_MAY_BE_ALLOCA)
                cfun->calls_alloca = true;
              if (flags & ECF_RETURNS_TWICE)
                cfun->calls_setjmp = true;
            }

          maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
                                      id->eh_map, id->eh_lp_nr);

          if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt))
            {
              ssa_op_iter i;
              tree def;

              find_new_referenced_vars (gsi_stmt (copy_gsi));
              FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
                if (TREE_CODE (def) == SSA_NAME)
                  SSA_NAME_DEF_STMT (def) = stmt;
            }

          gsi_next (&copy_gsi);
        }
      while (!gsi_end_p (copy_gsi));

      copy_gsi = gsi_last_bb (copy_basic_block);
    }

  return copy_basic_block;
}

/* Inserting a Single Entry Multiple Exit region in SSA form into code in SSA
   form is quite easy, since the dominator relationship for the old basic
   blocks does not change.

   There is, however, an exception: inlining might change the dominator
   relation across EH edges from basic blocks within the inlined function
   to landing pads in the function we inline into.

   The function fills in PHI_RESULTs of such PHI nodes if they refer
   to gimple regs.  Otherwise, the function marks the PHI_RESULT of such
   PHI nodes for renaming.  For non-gimple regs, renaming is safe: the
   EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
   set, and this means that there will be no overlapping live ranges
   for the underlying symbol.

   This might change in the future if we allow redirecting of EH edges;
   we might then want to change the way we build the CFG pre-inlining to
   include all the possible edges.  */
static void
update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
                                  bool can_throw, bool nonlocal_goto)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!e->dest->aux
        || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
      {
        gimple phi;
        gimple_stmt_iterator si;

        if (!nonlocal_goto)
          gcc_assert (e->flags & EDGE_EH);

        if (!can_throw)
          gcc_assert (!(e->flags & EDGE_EH));

        for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
          {
            edge re;

            phi = gsi_stmt (si);

            /* There shouldn't be any PHI nodes in the ENTRY_BLOCK.  */
            gcc_assert (!e->dest->aux);

            gcc_assert ((e->flags & EDGE_EH)
                        || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));

            if (!is_gimple_reg (PHI_RESULT (phi)))
              {
                mark_sym_for_renaming (SSA_NAME_VAR (PHI_RESULT (phi)));
                continue;
              }

            re = find_edge (ret_bb, e->dest);
            gcc_assert (re);
            gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
                        == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));

            SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
                     USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
          }
      }
}


/* Copy edges from BB into its copy constructed earlier, scale profile
   accordingly.  Assume aux pointers point to the copies of each BB.
   Return true if any debug stmts are left after a statement that
   must end the basic block.  */

static bool
copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb)
{
  basic_block new_bb = (basic_block) bb->aux;
  edge_iterator ei;
  edge old_edge;
  gimple_stmt_iterator si;
  int flags;
  bool need_debug_cleanup = false;

  /* Use the indices from the original blocks to create edges for the
     new ones.  */
  FOR_EACH_EDGE (old_edge, ei, bb->succs)
    if (!(old_edge->flags & EDGE_EH))
      {
        edge new_edge;

        flags = old_edge->flags;

        /* Return edges do get a FALLTHRU flag when they get inlined.  */
        if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
            && old_edge->dest->aux != EXIT_BLOCK_PTR)
          flags |= EDGE_FALLTHRU;
        new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
        new_edge->count = old_edge->count * count_scale / REG_BR_PROB_BASE;
        new_edge->probability = old_edge->probability;
      }

  if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
    return false;

  for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
    {
      gimple copy_stmt;
      bool can_throw, nonlocal_goto;

      copy_stmt = gsi_stmt (si);
      if (!is_gimple_debug (copy_stmt))
        {
          update_stmt (copy_stmt);
          if (gimple_in_ssa_p (cfun))
            mark_symbols_for_renaming (copy_stmt);
        }

      /* Do this before the possible split_block.  */
      gsi_next (&si);

      /* If this tree could throw an exception, there are two
         cases where we need to add abnormal edge(s): the
         tree wasn't in a region and there is a "current
         region" in the caller; or the original tree had
         EH edges.  In both cases split the block after the tree,
         and add abnormal edge(s) as needed; we need both
         those from the callee and the caller.
         We check whether the copy can throw, because the const
         propagation can change an INDIRECT_REF which throws
         into a COMPONENT_REF which doesn't.  If the copy
         can throw, the original could also throw.  */
      can_throw = stmt_can_throw_internal (copy_stmt);
      nonlocal_goto = stmt_can_make_abnormal_goto (copy_stmt);

      if (can_throw || nonlocal_goto)
        {
          if (!gsi_end_p (si))
            {
              while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
                gsi_next (&si);
              if (gsi_end_p (si))
                need_debug_cleanup = true;
            }
          if (!gsi_end_p (si))
            /* Note that bb's predecessor edges aren't necessarily
               right at this point; split_block doesn't care.  */
            {
              edge e = split_block (new_bb, copy_stmt);

              new_bb = e->dest;
              new_bb->aux = e->src->aux;
              si = gsi_start_bb (new_bb);
            }
        }

      if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
        make_eh_dispatch_edges (copy_stmt);
      else if (can_throw)
        make_eh_edges (copy_stmt);

      if (nonlocal_goto)
        make_abnormal_goto_edges (gimple_bb (copy_stmt), true);

      if ((can_throw || nonlocal_goto)
          && gimple_in_ssa_p (cfun))
        update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
                                          can_throw, nonlocal_goto);
    }
  return need_debug_cleanup;
}

/* Copy the PHIs.  All blocks and edges are copied, some blocks
   were possibly split and new outgoing EH edges inserted.
   BB points to the block of the original function and AUX pointers
   link the original and newly copied blocks.  */

static void
copy_phis_for_bb (basic_block bb, copy_body_data *id)
{
  basic_block const new_bb = (basic_block) bb->aux;
  edge_iterator ei;
  gimple phi;
  gimple_stmt_iterator si;
  edge new_edge;
  bool inserted = false;

  for (si = gsi_start (phi_nodes (bb)); !gsi_end_p (si); gsi_next (&si))
    {
      tree res, new_res;
      gimple new_phi;

      phi = gsi_stmt (si);
      res = PHI_RESULT (phi);
      new_res = res;
      if (is_gimple_reg (res))
        {
          walk_tree (&new_res, copy_tree_body_r, id, NULL);
          SSA_NAME_DEF_STMT (new_res)
            = new_phi = create_phi_node (new_res, new_bb);
          FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
            {
              edge old_edge = find_edge ((basic_block) new_edge->src->aux, bb);
              tree arg;
              tree new_arg;
              tree block = id->block;
              edge_iterator ei2;

              /* When doing partial cloning, we allow PHIs on the entry block
                 as long as all the arguments are the same.  Find any input
                 edge to see which argument to copy.  */
              if (!old_edge)
                FOR_EACH_EDGE (old_edge, ei2, bb->preds)
                  if (!old_edge->src->aux)
                    break;

              arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
              new_arg = arg;
              id->block = NULL_TREE;
              walk_tree (&new_arg, copy_tree_body_r, id, NULL);
              id->block = block;
              gcc_assert (new_arg);
              /* With return slot optimization we can end up with
                 non-gimple (foo *)&this->m, fix that here.  */
              if (TREE_CODE (new_arg) != SSA_NAME
                  && TREE_CODE (new_arg) != FUNCTION_DECL
                  && !is_gimple_val (new_arg))
                {
                  gimple_seq stmts = NULL;
                  new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
                  gsi_insert_seq_on_edge (new_edge, stmts);
                  inserted = true;
                }
              add_phi_arg (new_phi, new_arg, new_edge,
                           gimple_phi_arg_location_from_edge (phi, old_edge));
            }
        }
    }

  /* Commit the delayed edge insertions.  */
  if (inserted)
    FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
      gsi_commit_one_edge_insert (new_edge, NULL);
}


/* Wrapper for remap_decl so it can be used as a callback.  */

static tree
remap_decl_1 (tree decl, void *data)
{
  return remap_decl (decl, (copy_body_data *) data);
}

/* Build struct function and associated datastructures for the new clone
   NEW_FNDECL to be built.  CALLEE_FNDECL is the original.  */

static void
initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
{
  struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
  gcov_type count_scale;

  if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
    count_scale = (REG_BR_PROB_BASE * count
                   / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
  else
    count_scale = REG_BR_PROB_BASE;
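  /* (COUNT_SCALE is a fixed-point ratio in units of REG_BR_PROB_BASE:
     a scale of 1.0 is REG_BR_PROB_BASE itself, and the scaled counts
     below are computed as count * count_scale / REG_BR_PROB_BASE.)  */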

  /* Register specific tree functions.  */
  gimple_register_cfg_hooks ();

  /* Get clean struct function.  */
  push_struct_function (new_fndecl);

  /* We will rebuild these, so just sanity check that they are empty.  */
  gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
  gcc_assert (cfun->local_decls == NULL);
  gcc_assert (cfun->cfg == NULL);
  gcc_assert (cfun->decl == new_fndecl);

  /* Copy items we preserve during cloning.  */
  cfun->static_chain_decl = src_cfun->static_chain_decl;
  cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
  cfun->function_end_locus = src_cfun->function_end_locus;
  cfun->curr_properties = src_cfun->curr_properties;
  cfun->last_verified = src_cfun->last_verified;
  cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
  cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
  cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
  cfun->stdarg = src_cfun->stdarg;
  cfun->after_inlining = src_cfun->after_inlining;
  cfun->can_throw_non_call_exceptions
    = src_cfun->can_throw_non_call_exceptions;
  cfun->returns_struct = src_cfun->returns_struct;
  cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
  cfun->after_tree_profile = src_cfun->after_tree_profile;

  init_empty_tree_cfg ();

  profile_status_for_function (cfun) = profile_status_for_function (src_cfun);
  ENTRY_BLOCK_PTR->count =
    (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
     REG_BR_PROB_BASE);
  ENTRY_BLOCK_PTR->frequency
    = ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
  EXIT_BLOCK_PTR->count =
    (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
     REG_BR_PROB_BASE);
  EXIT_BLOCK_PTR->frequency =
    EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
  if (src_cfun->eh)
    init_eh_for_function ();

  if (src_cfun->gimple_df)
    {
      init_tree_ssa (cfun);
      cfun->gimple_df->in_ssa_p = true;
      init_ssa_operands ();
    }
  pop_cfun ();
}

/* Helper function for copy_cfg_body.  Move debug stmts from the end
   of NEW_BB to the beginning of successor basic blocks when needed.  If the
   successor has multiple predecessors, reset the debug stmt values;
   otherwise keep them.  */

static void
maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
{
  edge e;
  edge_iterator ei;
  gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);

  if (gsi_end_p (si)
      || gsi_one_before_end_p (si)
      || !(stmt_can_throw_internal (gsi_stmt (si))
           || stmt_can_make_abnormal_goto (gsi_stmt (si))))
    return;

  FOR_EACH_EDGE (e, ei, new_bb->succs)
    {
      gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
      gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
      while (is_gimple_debug (gsi_stmt (ssi)))
        {
          gimple stmt = gsi_stmt (ssi), new_stmt;
          tree var;
          tree value;

          /* For the last edge move the debug stmts instead of copying
             them.  */
          if (ei_one_before_end_p (ei))
            {
              si = ssi;
              gsi_prev (&ssi);
              if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
                gimple_debug_bind_reset_value (stmt);
              gsi_remove (&si, false);
              gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
              continue;
            }

          if (gimple_debug_bind_p (stmt))
            {
              var = gimple_debug_bind_get_var (stmt);
              if (single_pred_p (e->dest))
                {
                  value = gimple_debug_bind_get_value (stmt);
                  value = unshare_expr (value);
                }
              else
                value = NULL_TREE;
              new_stmt = gimple_build_debug_bind (var, value, stmt);
            }
          else if (gimple_debug_source_bind_p (stmt))
            {
              var = gimple_debug_source_bind_get_var (stmt);
              value = gimple_debug_source_bind_get_value (stmt);
              new_stmt = gimple_build_debug_source_bind (var, value, stmt);
            }
          else
            gcc_unreachable ();
          gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
          VEC_safe_push (gimple, heap, id->debug_stmts, new_stmt);
          gsi_prev (&ssi);
        }
    }
}

/* Make a copy of the body of FN so that it can be inserted inline in
   another function.  Walks FN via CFG, returns new fndecl.  */

static tree
copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
               basic_block entry_block_map, basic_block exit_block_map,
               bitmap blocks_to_copy, basic_block new_entry)
{
  tree callee_fndecl = id->src_fn;
  /* Original cfun for the callee, doesn't change.  */
  struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
  struct function *cfun_to_copy;
  basic_block bb;
  tree new_fndecl = NULL;
  bool need_debug_cleanup = false;
  gcov_type count_scale;
  int last;
  int incoming_frequency = 0;
  gcov_type incoming_count = 0;

  if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
    count_scale = (REG_BR_PROB_BASE * count
                   / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
  else
    count_scale = REG_BR_PROB_BASE;

  /* Register specific tree functions.  */
  gimple_register_cfg_hooks ();

  /* If we are inlining just a region of the function, make sure to connect
     the new entry to ENTRY_BLOCK_PTR.  Since the new entry can be part of a
     loop, we must compute the frequency and probability of ENTRY_BLOCK_PTR
     based on the frequencies and probabilities of edges incoming from the
     nonduplicated region.  */
  if (new_entry)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, new_entry->preds)
        if (!e->src->aux)
          {
            incoming_frequency += EDGE_FREQUENCY (e);
            incoming_count += e->count;
          }
      incoming_count = incoming_count * count_scale / REG_BR_PROB_BASE;
      incoming_frequency
        = incoming_frequency * frequency_scale / REG_BR_PROB_BASE;
      ENTRY_BLOCK_PTR->count = incoming_count;
      ENTRY_BLOCK_PTR->frequency = incoming_frequency;
    }

  /* Must have a CFG here at this point.  */
  gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
              (DECL_STRUCT_FUNCTION (callee_fndecl)));

  cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);

  ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
  EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;
  entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
  exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);

  /* Duplicate any exception-handling regions.  */
  if (cfun->eh)
    id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
                                       remap_decl_1, id);

  /* Use aux pointers to map the original blocks to copy.  */
  FOR_EACH_BB_FN (bb, cfun_to_copy)
    if (!blocks_to_copy || bitmap_bit_p (blocks_to_copy, bb->index))
      {
        basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
        bb->aux = new_bb;
        new_bb->aux = bb;
      }

  last = last_basic_block;

  /* Now that we've duplicated the blocks, duplicate their edges.  */
  FOR_ALL_BB_FN (bb, cfun_to_copy)
    if (!blocks_to_copy
        || (bb->index > 0 && bitmap_bit_p (blocks_to_copy, bb->index)))
      need_debug_cleanup |= copy_edges_for_bb (bb, count_scale, exit_block_map);

  if (new_entry)
    {
      edge e = make_edge (entry_block_map, (basic_block)new_entry->aux, EDGE_FALLTHRU);
      e->probability = REG_BR_PROB_BASE;
      e->count = incoming_count;
    }

  if (gimple_in_ssa_p (cfun))
    FOR_ALL_BB_FN (bb, cfun_to_copy)
      if (!blocks_to_copy
          || (bb->index > 0 && bitmap_bit_p (blocks_to_copy, bb->index)))
        copy_phis_for_bb (bb, id);

  FOR_ALL_BB_FN (bb, cfun_to_copy)
    if (bb->aux)
      {
        if (need_debug_cleanup
            && bb->index != ENTRY_BLOCK
            && bb->index != EXIT_BLOCK)
          maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
        ((basic_block)bb->aux)->aux = NULL;
        bb->aux = NULL;
      }

  /* Zero out AUX fields of blocks newly created during EH edge
     insertion.  */
  for (; last < last_basic_block; last++)
    {
      if (need_debug_cleanup)
        maybe_move_debug_stmts_to_successors (id, BASIC_BLOCK (last));
      BASIC_BLOCK (last)->aux = NULL;
    }
  entry_block_map->aux = NULL;
  exit_block_map->aux = NULL;

  if (id->eh_map)
    {
      pointer_map_destroy (id->eh_map);
      id->eh_map = NULL;
    }

  return new_fndecl;
}

/* Copy the debug STMT using ID.  We deal with these statements in a
   special way: if any variable in their VALUE expression wasn't
   remapped yet, we won't remap it, because that would get decl uids
   out of sync, causing codegen differences between -g and -g0.  If
   this arises, we drop the VALUE expression altogether.  */
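/* (In GIMPLE dumps a debug bind appears as "# DEBUG var => value"; one
   whose value has been dropped is shown as "# DEBUG var => NULL", so
   the variable stays tracked but carries no value at this point.)  */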

static void
copy_debug_stmt (gimple stmt, copy_body_data *id)
{
  tree t, *n;
  struct walk_stmt_info wi;

  t = id->block;
  if (gimple_block (stmt))
    {
      n = (tree *) pointer_map_contains (id->decl_map, gimple_block (stmt));
      if (n)
        t = *n;
    }
  gimple_set_block (stmt, t);

  /* Remap all the operands in COPY.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = id;

  processing_debug_stmt = 1;

  if (gimple_debug_source_bind_p (stmt))
    t = gimple_debug_source_bind_get_var (stmt);
  else
    t = gimple_debug_bind_get_var (stmt);

  if (TREE_CODE (t) == PARM_DECL && id->debug_map
      && (n = (tree *) pointer_map_contains (id->debug_map, t)))
    {
      gcc_assert (TREE_CODE (*n) == VAR_DECL);
      t = *n;
    }
  else if (TREE_CODE (t) == VAR_DECL
           && !TREE_STATIC (t)
           && gimple_in_ssa_p (cfun)
           && !pointer_map_contains (id->decl_map, t)
           && !var_ann (t))
    /* T is a non-localized variable.  */;
  else
    walk_tree (&t, remap_gimple_op_r, &wi, NULL);

  if (gimple_debug_bind_p (stmt))
    {
      gimple_debug_bind_set_var (stmt, t);

      if (gimple_debug_bind_has_value_p (stmt))
        walk_tree (gimple_debug_bind_get_value_ptr (stmt),
                   remap_gimple_op_r, &wi, NULL);

      /* Punt if any decl couldn't be remapped.  */
      if (processing_debug_stmt < 0)
        gimple_debug_bind_reset_value (stmt);
    }
  else if (gimple_debug_source_bind_p (stmt))
    {
      gimple_debug_source_bind_set_var (stmt, t);
      walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
                 remap_gimple_op_r, &wi, NULL);
    }

  processing_debug_stmt = 0;

  update_stmt (stmt);
  if (gimple_in_ssa_p (cfun))
    mark_symbols_for_renaming (stmt);
}

/* Process deferred debug stmts.  In order to give values better odds
   of being successfully remapped, we delay the processing of debug
   stmts until all other stmts that might require remapping are
   processed.  */

static void
copy_debug_stmts (copy_body_data *id)
{
  size_t i;
  gimple stmt;

  if (!id->debug_stmts)
    return;

  FOR_EACH_VEC_ELT (gimple, id->debug_stmts, i, stmt)
    copy_debug_stmt (stmt, id);

  VEC_free (gimple, heap, id->debug_stmts);
}

/* Make a copy of the body of SRC_FN so that it can be inserted inline in
   another function.  */

static tree
copy_tree_body (copy_body_data *id)
{
  tree fndecl = id->src_fn;
  tree body = DECL_SAVED_TREE (fndecl);

  walk_tree (&body, copy_tree_body_r, id, NULL);

  return body;
}

/* Make a copy of the body of FN so that it can be inserted inline in
   another function.  */

static tree
copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
           basic_block entry_block_map, basic_block exit_block_map,
           bitmap blocks_to_copy, basic_block new_entry)
{
  tree fndecl = id->src_fn;
  tree body;

  /* If this body has a CFG, walk CFG and copy.  */
  gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
  body = copy_cfg_body (id, count, frequency_scale, entry_block_map, exit_block_map,
                        blocks_to_copy, new_entry);
  copy_debug_stmts (id);

  return body;
}

/* Return true if VALUE is an ADDR_EXPR of an automatic variable
   defined in function FN, or of a data member thereof.  */
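/* (For example, "&local" or "&local.member" where LOCAL is an auto
   variable of FN itself.  Such an address must not be propagated when
   FN is inlined into itself, directly or mutually recursively, since
   each recursive frame has its own instance of LOCAL.)  */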

static bool
self_inlining_addr_expr (tree value, tree fn)
{
  tree var;

  if (TREE_CODE (value) != ADDR_EXPR)
    return false;

  var = get_base_address (TREE_OPERAND (value, 0));

  return var && auto_var_in_fn_p (var, fn);
}

/* Append to BB a debug annotation that binds VAR to VALUE, inheriting
   lexical block and line number information from BASE_STMT, if given,
   or from the last stmt of the block otherwise.  */

static gimple
insert_init_debug_bind (copy_body_data *id,
                        basic_block bb, tree var, tree value,
                        gimple base_stmt)
{
  gimple note;
  gimple_stmt_iterator gsi;
  tree tracked_var;

  if (!gimple_in_ssa_p (id->src_cfun))
    return NULL;

  if (!MAY_HAVE_DEBUG_STMTS)
    return NULL;

  tracked_var = target_for_debug_bind (var);
  if (!tracked_var)
    return NULL;

  if (bb)
    {
      gsi = gsi_last_bb (bb);
      if (!base_stmt && !gsi_end_p (gsi))
        base_stmt = gsi_stmt (gsi);
    }

  note = gimple_build_debug_bind (tracked_var, value, base_stmt);

  if (bb)
    {
      if (!gsi_end_p (gsi))
        gsi_insert_after (&gsi, note, GSI_SAME_STMT);
      else
        gsi_insert_before (&gsi, note, GSI_SAME_STMT);
    }

  return note;
}

static void
insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt)
{
  /* If VAR represents a zero-sized variable, it's possible that the
     assignment statement may result in no gimple statements.  */
  if (init_stmt)
    {
      gimple_stmt_iterator si = gsi_last_bb (bb);

      /* We can end up with init statements that store to a non-register
         from a rhs with a conversion.  Handle that here by forcing the
         rhs into a temporary.  gimple_regimplify_operands is not
         prepared to do this for us.  */
      if (!is_gimple_debug (init_stmt)
          && !is_gimple_reg (gimple_assign_lhs (init_stmt))
          && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
          && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
        {
          tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
                             gimple_expr_type (init_stmt),
                             gimple_assign_rhs1 (init_stmt));
          rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
                                          GSI_NEW_STMT);
          gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
          gimple_assign_set_rhs1 (init_stmt, rhs);
        }
      gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
      gimple_regimplify_operands (init_stmt, &si);
      mark_symbols_for_renaming (init_stmt);

      if (!is_gimple_debug (init_stmt) && MAY_HAVE_DEBUG_STMTS)
        {
          tree var, def = gimple_assign_lhs (init_stmt);

          if (TREE_CODE (def) == SSA_NAME)
            var = SSA_NAME_VAR (def);
          else
            var = def;

          insert_init_debug_bind (id, bb, var, def, init_stmt);
        }
    }
}

/* Initialize parameter P with VALUE.  If needed, produce an init statement
   at the end of BB.  When BB is NULL, we return the init statement to be
   output later.  */
static gimple
setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
                     basic_block bb, tree *vars)
{
  gimple init_stmt = NULL;
  tree var;
  tree rhs = value;
  tree def = (gimple_in_ssa_p (cfun)
              ? gimple_default_def (id->src_cfun, p) : NULL);

  if (value
      && value != error_mark_node
      && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
    {
      /* If we can match up types by promotion/demotion do so.  */
      if (fold_convertible_p (TREE_TYPE (p), value))
        rhs = fold_convert (TREE_TYPE (p), value);
      else
        {
          /* ???  For valid programs we should not end up here.
             Still if we end up with truly mismatched types here, fall back
             to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
             GIMPLE to the following passes.  */
          if (!is_gimple_reg_type (TREE_TYPE (value))
              || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
            rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
          else
            rhs = build_zero_cst (TREE_TYPE (p));
        }
    }

  /* Make an equivalent VAR_DECL.  Note that we must NOT remap the type
     here since the type of this decl must be visible to the calling
     function.  */
  var = copy_decl_to_var (p, id);

  /* We're actually using the newly-created var.  */
  if (gimple_in_ssa_p (cfun) && TREE_CODE (var) == VAR_DECL)
    add_referenced_var (var);

  /* Declare this new variable.  */
  DECL_CHAIN (var) = *vars;
  *vars = var;

  /* Make gimplifier happy about this variable.  */
  DECL_SEEN_IN_BIND_EXPR_P (var) = 1;

  /* If the parameter is never assigned to and has no SSA_NAMEs created,
     we would not need to create a new variable here at all, if it
     weren't for debug info.  Still, we can just use the argument
     value.  */
  if (TREE_READONLY (p)
      && !TREE_ADDRESSABLE (p)
      && value && !TREE_SIDE_EFFECTS (value)
      && !def)
    {
      /* We may produce non-gimple trees by adding NOPs or introduce
         invalid sharing when the operand is not really constant.
         It is not a big deal to prohibit constant propagation here as
         we will constant propagate in the DOM1 pass anyway.  */
      if (is_gimple_min_invariant (value)
          && useless_type_conversion_p (TREE_TYPE (p),
                                        TREE_TYPE (value))
          /* We have to be very careful about ADDR_EXPR.  Make sure
             the base variable isn't a local variable of the inlined
             function, e.g., when doing recursive inlining, direct or
             mutually-recursive or whatever, which is why we don't
             just test whether fn == current_function_decl.  */
          && ! self_inlining_addr_expr (value, fn))
        {
          insert_decl_map (id, p, value);
          insert_debug_decl_map (id, p, var);
          return insert_init_debug_bind (id, bb, var, value, NULL);
        }
    }

  /* Register the VAR_DECL as the equivalent for the PARM_DECL;
     that way, when the PARM_DECL is encountered, it will be
     automatically replaced by the VAR_DECL.  */
  insert_decl_map (id, p, var);

  /* Even if P was TREE_READONLY, the new VAR should not be.
     In the original code, we would have constructed a
     temporary, and then the function body would have never
     changed the value of P.  However, now, we will be
     constructing VAR directly.  The constructor body may
     change its value multiple times as it is being
     constructed.  Therefore, it must not be TREE_READONLY;
     the back-end assumes that a TREE_READONLY variable is
     assigned to only once.  */
  if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
    TREE_READONLY (var) = 0;

  /* If there is no setup required and we are in SSA, take the easy route
     replacing all SSA names representing the function parameter by the
     SSA name passed to the function.

     We need to construct a map for the variable anyway as it might be used
     in different SSA names when the parameter is set in the function.

     Do the replacement at -O0 for const arguments replaced by a constant.
     This is important for builtin_constant_p and other constructs requiring
     the constant argument to be visible in the inlined function body.  */
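  /* (For instance, when f (5) is inlined and the parameter P is a
     const gimple register, mapping P's default SSA name straight to
     the constant 5 lets __builtin_constant_p (p) in the inlined body
     evaluate to 1 even at -O0.)  */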
  if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
      && (optimize
          || (TREE_READONLY (p)
              && is_gimple_min_invariant (rhs)))
      && (TREE_CODE (rhs) == SSA_NAME
          || is_gimple_min_invariant (rhs))
      && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
    {
      insert_decl_map (id, def, rhs);
      return insert_init_debug_bind (id, bb, var, rhs, NULL);
    }

  /* If the value of the argument is never used, don't care about
     initializing it.  */
  if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
    {
      gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
      return insert_init_debug_bind (id, bb, var, rhs, NULL);
    }

  /* Initialize this VAR_DECL from the equivalent argument.  Convert
     the argument to the proper type in case it was promoted.  */
  if (value)
    {
      if (rhs == error_mark_node)
        {
          insert_decl_map (id, p, var);
          return insert_init_debug_bind (id, bb, var, rhs, NULL);
        }

      STRIP_USELESS_TYPE_CONVERSION (rhs);

      /* We want to use MODIFY_EXPR, not INIT_EXPR here so that we
         keep our trees in gimple form.  */
      if (def && gimple_in_ssa_p (cfun) && is_gimple_reg (p))
        {
          def = remap_ssa_name (def, id);
          init_stmt = gimple_build_assign (def, rhs);
          SSA_NAME_IS_DEFAULT_DEF (def) = 0;
          set_default_def (var, NULL);
        }
      else
        init_stmt = gimple_build_assign (var, rhs);

      if (bb && init_stmt)
        insert_init_stmt (id, bb, init_stmt);
    }
  return init_stmt;
}

/* Generate code to initialize the parameters of the function at the
   top of the stack in ID from the GIMPLE_CALL STMT.  */

static void
initialize_inlined_parameters (copy_body_data *id, gimple stmt,
                               tree fn, basic_block bb)
{
  tree parms;
  size_t i;
  tree p;
  tree vars = NULL_TREE;
  tree static_chain = gimple_call_chain (stmt);

  /* Figure out what the parameters are.  */
  parms = DECL_ARGUMENTS (fn);

  /* Loop through the parameter declarations, replacing each with an
     equivalent VAR_DECL, appropriately initialized.  */
  for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
    {
      tree val;
      val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
      setup_one_parameter (id, p, val, fn, bb, &vars);
    }
  /* After remapping parameters remap their types.  This has to be done
     in a second loop over all parameters to appropriately remap
     variable sized arrays when the size is specified in a
     parameter following the array.  */
  for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
    {
      tree *varp = (tree *) pointer_map_contains (id->decl_map, p);
      if (varp
          && TREE_CODE (*varp) == VAR_DECL)
        {
          tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
                      ? gimple_default_def (id->src_cfun, p) : NULL);
          tree var = *varp;
          TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
          /* Also remap the default definition if it was remapped
             to the default definition of the parameter replacement
             by the parameter setup.  */
          if (def)
            {
              tree *defp = (tree *) pointer_map_contains (id->decl_map, def);
              if (defp
                  && TREE_CODE (*defp) == SSA_NAME
                  && SSA_NAME_VAR (*defp) == var)
                TREE_TYPE (*defp) = TREE_TYPE (var);
            }
        }
    }

  /* Initialize the static chain.  */
  p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
  gcc_assert (fn != current_function_decl);
  if (p)
    {
      /* No static chain?  Seems like a bug in tree-nested.c.  */
      gcc_assert (static_chain);

      setup_one_parameter (id, p, static_chain, fn, bb, &vars);
    }

  declare_inline_vars (id->block, vars);
}


/* Declare a return variable to replace the RESULT_DECL for the
   function we are calling.

   RETURN_SLOT, if non-null, is the place where the result is to be
   stored.  It is set only for CALL_EXPR_RETURN_SLOT_OPT.  MODIFY_DEST,
   if non-null, was the LHS of the MODIFY_EXPR to which this call is
   the RHS.

   The return value is a (possibly null) value that holds the result
   as seen by the caller.  */

static tree
declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
                         basic_block entry_bb)
{
  tree callee = id->src_fn;
  tree result = DECL_RESULT (callee);
  tree callee_type = TREE_TYPE (result);
  tree caller_type;
  tree var, use;

  /* Handle type-mismatches in the function declaration return type
     vs. the call expression.  */
  if (modify_dest)
    caller_type = TREE_TYPE (modify_dest);
  else
    caller_type = TREE_TYPE (TREE_TYPE (callee));

  /* We don't need to do anything for functions that don't return anything.  */
  if (VOID_TYPE_P (callee_type))
    return NULL_TREE;

  /* If there was a return slot, then the return value is the
     dereferenced address of that object.  */
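  /* (For example, for an aggregate call "s = f ()" where the front end
     applied the return slot optimization, VAR becomes "s" itself, or
     "&s" to be dereferenced when the result is returned by reference,
     instead of a fresh temporary.)  */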
2818
  if (return_slot)
2819
    {
2820
      /* The front end shouldn't have used both return_slot and
2821
         a modify expression.  */
2822
      gcc_assert (!modify_dest);
2823
      if (DECL_BY_REFERENCE (result))
2824
        {
2825
          tree return_slot_addr = build_fold_addr_expr (return_slot);
2826
          STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
2827
 
2828
          /* We are going to construct *&return_slot and we can't do that
2829
             for variables believed to be not addressable.
2830
 
2831
             FIXME: This check possibly can match, because values returned
2832
             via return slot optimization are not believed to have address
2833
             taken by alias analysis.  */
2834
          gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
2835
          var = return_slot_addr;
2836
        }
2837
      else
2838
        {
2839
          var = return_slot;
2840
          gcc_assert (TREE_CODE (var) != SSA_NAME);
2841
          TREE_ADDRESSABLE (var) |= TREE_ADDRESSABLE (result);
2842
        }
2843
      if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2844
           || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2845
          && !DECL_GIMPLE_REG_P (result)
2846
          && DECL_P (var))
2847
        DECL_GIMPLE_REG_P (var) = 0;
2848
      use = NULL;
2849
      goto done;
2850
    }
2851
 
2852
  /* All types requiring non-trivial constructors should have been handled.  */
2853
  gcc_assert (!TREE_ADDRESSABLE (callee_type));
2854
 
2855
  /* Attempt to avoid creating a new temporary variable.  */
2856
  if (modify_dest
2857
      && TREE_CODE (modify_dest) != SSA_NAME)
2858
    {
2859
      bool use_it = false;
2860
 
2861
      /* We can't use MODIFY_DEST if there's type promotion involved.  */
2862
      if (!useless_type_conversion_p (callee_type, caller_type))
2863
        use_it = false;
2864
 
2865
      /* ??? If we're assigning to a variable sized type, then we must
2866
         reuse the destination variable, because we've no good way to
2867
         create variable sized temporaries at this point.  */
2868
      else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
2869
        use_it = true;
2870
 
2871
      /* If the callee cannot possibly modify MODIFY_DEST, then we can
2872
         reuse it as the result of the call directly.  Don't do this if
2873
         it would promote MODIFY_DEST to addressable.  */
2874
      else if (TREE_ADDRESSABLE (result))
2875
        use_it = false;
2876
      else
2877
        {
2878
          tree base_m = get_base_address (modify_dest);
2879
 
2880
          /* If the base isn't a decl, then it's a pointer, and we don't
2881
             know where that's going to go.  */
2882
          if (!DECL_P (base_m))
2883
            use_it = false;
2884
          else if (is_global_var (base_m))
2885
            use_it = false;
2886
          else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2887
                    || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2888
                   && !DECL_GIMPLE_REG_P (result)
2889
                   && DECL_GIMPLE_REG_P (base_m))
2890
            use_it = false;
2891
          else if (!TREE_ADDRESSABLE (base_m))
2892
            use_it = true;
2893
        }
2894
 
2895
      if (use_it)
2896
        {
2897
          var = modify_dest;
2898
          use = NULL;
2899
          goto done;
2900
        }
2901
    }
2902
 
2903
  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
2904
 
2905
  var = copy_result_decl_to_var (result, id);
2906
  if (gimple_in_ssa_p (cfun))
2907
    add_referenced_var (var);
2908
 
2909
  DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2910
 
2911
  /* Do not have the rest of GCC warn about this variable as it should
2912
     not be visible to the user.  */
2913
  TREE_NO_WARNING (var) = 1;
2914
 
2915
  declare_inline_vars (id->block, var);
2916
 
2917
  /* Build the use expr.  If the return type of the function was
2918
     promoted, convert it back to the expected type.  */
2919
  use = var;
2920
  if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
2921
    {
2922
      /* If we can match up types by promotion/demotion do so.  */
2923
      if (fold_convertible_p (caller_type, var))
2924
        use = fold_convert (caller_type, var);
2925
      else
2926
        {
2927
          /* ???  For valid programs we should not end up here.
2928
             Still if we end up with truly mismatched types here, fall back
2929
             to using a MEM_REF to not leak invalid GIMPLE to the following
2930
             passes.  */
2931
          /* Prevent var from being written into SSA form.  */
2932
          if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
2933
              || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
2934
            DECL_GIMPLE_REG_P (var) = false;
2935
          else if (is_gimple_reg_type (TREE_TYPE (var)))
2936
            TREE_ADDRESSABLE (var) = true;
2937
          use = fold_build2 (MEM_REF, caller_type,
2938
                             build_fold_addr_expr (var),
2939
                             build_int_cst (ptr_type_node, 0));
2940
        }
2941
    }
2942
 
2943
  STRIP_USELESS_TYPE_CONVERSION (use);
2944
 
2945
  if (DECL_BY_REFERENCE (result))
2946
    {
2947
      TREE_ADDRESSABLE (var) = 1;
2948
      var = build_fold_addr_expr (var);
2949
    }
2950
 
2951
 done:
2952
  /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
2953
     way, when the RESULT_DECL is encountered, it will be
2954
     automatically replaced by the VAR_DECL.
2955
 
2956
     When returning by reference, ensure that RESULT_DECL remaps to a
2957
     gimple_val.  */
2958
  if (DECL_BY_REFERENCE (result)
2959
      && !is_gimple_val (var))
2960
    {
2961
      tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
2962
      if (gimple_in_ssa_p (id->src_cfun))
2963
        add_referenced_var (temp);
2964
      insert_decl_map (id, result, temp);
2965
      /* When RESULT_DECL is in SSA form, we need to use its default_def
2966
         SSA_NAME.  */
2967
      if (gimple_in_ssa_p (id->src_cfun) && gimple_default_def (id->src_cfun, result))
2968
        temp = remap_ssa_name (gimple_default_def (id->src_cfun, result), id);
2969
      insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
2970
    }
2971
  else
2972
    insert_decl_map (id, result, var);
2973
 
2974
  /* Remember this so we can ignore it in remap_decls.  */
2975
  id->retvar = var;
2976
 
2977
  return use;
2978
}
2979
 
2980
/* Callback through walk_tree.  Determine if a DECL_INITIAL makes reference
2981
   to a local label.  */
2982
 
2983
static tree
2984
has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp)
2985
{
2986
  tree node = *nodep;
2987
  tree fn = (tree) fnp;
2988
 
2989
  if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
2990
    return node;
2991
 
2992
  if (TYPE_P (node))
2993
    *walk_subtrees = 0;
2994
 
2995
  return NULL_TREE;
2996
}
2997
 
2998
/* Determine if the function can be copied.  If so return NULL.  If
2999
   not return a string describing the reason for failure.  */
3000
 
3001
static const char *
3002
copy_forbidden (struct function *fun, tree fndecl)
3003
{
3004
  const char *reason = fun->cannot_be_copied_reason;
3005
  tree decl;
3006
  unsigned ix;
3007
 
3008
  /* Only examine the function once.  */
3009
  if (fun->cannot_be_copied_set)
3010
    return reason;
3011
 
3012
  /* We cannot copy a function that receives a non-local goto
3013
     because we cannot remap the destination label used in the
3014
     function that is performing the non-local goto.  */
3015
  /* ??? Actually, this should be possible, if we work at it.
3016
     No doubt there's just a handful of places that simply
3017
     assume it doesn't happen and don't substitute properly.  */
3018
  if (fun->has_nonlocal_label)
3019
    {
3020
      reason = G_("function %q+F can never be copied "
3021
                  "because it receives a non-local goto");
3022
      goto fail;
3023
    }
3024
 
3025
  FOR_EACH_LOCAL_DECL (fun, ix, decl)
3026
    if (TREE_CODE (decl) == VAR_DECL
3027
        && TREE_STATIC (decl)
3028
        && !DECL_EXTERNAL (decl)
3029
        && DECL_INITIAL (decl)
3030
        && walk_tree_without_duplicates (&DECL_INITIAL (decl),
3031
                                         has_label_address_in_static_1,
3032
                                         fndecl))
3033
      {
3034
        reason = G_("function %q+F can never be copied because it saves "
3035
                    "address of local label in a static variable");
3036
        goto fail;
3037
      }
3038
 
3039
 fail:
3040
  fun->cannot_be_copied_reason = reason;
3041
  fun->cannot_be_copied_set = true;
3042
  return reason;
3043
}
3044
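/* An illustrative sketch of the second reason above (hypothetical
   user code, not from this file):

     void f (void)
     {
       static void *labp = &&lab;
     lab:
       return;
     }

   The static variable saves the address of the local label "lab";
   a copy of f could not remap that address, so copying is
   forbidden.  */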
 
3045
 
3046
static const char *inline_forbidden_reason;
3047
 
3048
/* A callback for walk_gimple_seq to handle statements.  Returns non-null
3049
   iff a function cannot be inlined.  Also sets the reason why.  */
3050
 
3051
static tree
3052
inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3053
                         struct walk_stmt_info *wip)
3054
{
3055
  tree fn = (tree) wip->info;
3056
  tree t;
3057
  gimple stmt = gsi_stmt (*gsi);
3058
 
3059
  switch (gimple_code (stmt))
3060
    {
3061
    case GIMPLE_CALL:
3062
      /* Refuse to inline an alloca call unless the user explicitly forced it,
3063
         as this may change the program's memory overhead drastically when the
3064
         function using alloca is called in a loop.  In the GCC included in
3065
         SPEC2000, inlining into schedule_block caused it to require 2GB of
3066
         RAM instead of 256MB.  Don't do so for alloca calls emitted for
3067
         VLA objects, as those can't cause unbounded growth (they're always
3068
         wrapped inside stack_save/stack_restore regions).  */
3069
      if (gimple_alloca_call_p (stmt)
3070
          && !gimple_call_alloca_for_var_p (stmt)
3071
          && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3072
        {
3073
          inline_forbidden_reason
3074
            = G_("function %q+F can never be inlined because it uses "
3075
                 "alloca (override using the always_inline attribute)");
3076
          *handled_ops_p = true;
3077
          return fn;
3078
        }
3079
 
3080
      t = gimple_call_fndecl (stmt);
3081
      if (t == NULL_TREE)
3082
        break;
3083
 
3084
      /* We cannot inline functions that call setjmp.  */
3085
      if (setjmp_call_p (t))
3086
        {
3087
          inline_forbidden_reason
3088
            = G_("function %q+F can never be inlined because it uses setjmp");
3089
          *handled_ops_p = true;
3090
          return t;
3091
        }
3092
 
3093
      if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3094
        switch (DECL_FUNCTION_CODE (t))
3095
          {
3096
            /* We cannot inline functions that take a variable number of
3097
               arguments.  */
3098
          case BUILT_IN_VA_START:
3099
          case BUILT_IN_NEXT_ARG:
3100
          case BUILT_IN_VA_END:
3101
            inline_forbidden_reason
3102
              = G_("function %q+F can never be inlined because it "
3103
                   "uses variable argument lists");
3104
            *handled_ops_p = true;
3105
            return t;
3106
 
3107
          case BUILT_IN_LONGJMP:
3108
            /* We can't inline functions that call __builtin_longjmp at
3109
               all.  The non-local goto machinery really requires the
3110
               destination be in a different function.  If we allow the
3111
               function calling __builtin_longjmp to be inlined into the
3112
               function calling __builtin_setjmp, Things will Go Awry.  */
3113
            inline_forbidden_reason
3114
              = G_("function %q+F can never be inlined because "
3115
                   "it uses setjmp-longjmp exception handling");
3116
            *handled_ops_p = true;
3117
            return t;
3118
 
3119
          case BUILT_IN_NONLOCAL_GOTO:
3120
            /* Similarly.  */
3121
            inline_forbidden_reason
3122
              = G_("function %q+F can never be inlined because "
3123
                   "it uses non-local goto");
3124
            *handled_ops_p = true;
3125
            return t;
3126
 
3127
          case BUILT_IN_RETURN:
3128
          case BUILT_IN_APPLY_ARGS:
3129
            /* If a __builtin_apply_args caller would be inlined,
3130
               it would be saving arguments of the function it has
3131
               been inlined into.  Similarly __builtin_return would
3132
               return from the function it has been inlined into.  */
3133
            inline_forbidden_reason
3134
              = G_("function %q+F can never be inlined because "
3135
                   "it uses __builtin_return or __builtin_apply_args");
3136
            *handled_ops_p = true;
3137
            return t;
3138
 
3139
          default:
3140
            break;
3141
          }
3142
      break;
3143
 
3144
    case GIMPLE_GOTO:
3145
      t = gimple_goto_dest (stmt);
3146
 
3147
      /* We will not inline a function which uses computed goto.  The
3148
         addresses of its local labels, which may be tucked into
3149
         global storage, are of course not constant across
3150
         instantiations, which causes unexpected behavior.  */
3151
      if (TREE_CODE (t) != LABEL_DECL)
3152
        {
3153
          inline_forbidden_reason
3154
            = G_("function %q+F can never be inlined "
3155
                 "because it contains a computed goto");
3156
          *handled_ops_p = true;
3157
          return t;
3158
        }
3159
      break;
3160
 
3161
    default:
3162
      break;
3163
    }
3164
 
3165
  *handled_ops_p = false;
3166
  return NULL_TREE;
3167
}
3168
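/* An illustrative example of the computed-goto case above
   (hypothetical user code, not from this file):

     void g (void *dest)
     {
       goto *dest;
     }

   The GIMPLE_GOTO destination is not a LABEL_DECL, so g is refused
   for inlining.  */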
 
3169
/* Return true if FNDECL is a function that cannot be inlined into
3170
   another one.  */
3171
 
3172
static bool
3173
inline_forbidden_p (tree fndecl)
3174
{
3175
  struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3176
  struct walk_stmt_info wi;
3177
  struct pointer_set_t *visited_nodes;
3178
  basic_block bb;
3179
  bool forbidden_p = false;
3180
 
3181
  /* First check for shared reasons not to copy the code.  */
3182
  inline_forbidden_reason = copy_forbidden (fun, fndecl);
3183
  if (inline_forbidden_reason != NULL)
3184
    return true;
3185
 
3186
  /* Next, walk the statements of the function looking for
3187
     constructs we can't handle, or that are non-optimal for inlining.  */
3188
  visited_nodes = pointer_set_create ();
3189
  memset (&wi, 0, sizeof (wi));
3190
  wi.info = (void *) fndecl;
3191
  wi.pset = visited_nodes;
3192
 
3193
  FOR_EACH_BB_FN (bb, fun)
3194
    {
3195
      gimple ret;
3196
      gimple_seq seq = bb_seq (bb);
3197
      ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3198
      forbidden_p = (ret != NULL);
3199
      if (forbidden_p)
3200
        break;
3201
    }
3202
 
3203
  pointer_set_destroy (visited_nodes);
3204
  return forbidden_p;
3205
}
3206
 
3207
/* Returns nonzero if FN is a function that does not have any
3208
   fundamental inline blocking properties.  */
3209
 
3210
bool
3211
tree_inlinable_function_p (tree fn)
3212
{
3213
  bool inlinable = true;
3214
  bool do_warning;
3215
  tree always_inline;
3216
 
3217
  /* If we've already decided this function shouldn't be inlined,
3218
     there's no need to check again.  */
3219
  if (DECL_UNINLINABLE (fn))
3220
    return false;
3221
 
3222
  /* We only warn for functions declared `inline' by the user.  */
3223
  do_warning = (warn_inline
3224
                && DECL_DECLARED_INLINE_P (fn)
3225
                && !DECL_NO_INLINE_WARNING_P (fn)
3226
                && !DECL_IN_SYSTEM_HEADER (fn));
3227
 
3228
  always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3229
 
3230
  if (flag_no_inline
3231
      && always_inline == NULL)
3232
    {
3233
      if (do_warning)
3234
        warning (OPT_Winline, "function %q+F can never be inlined because it "
3235
                 "is suppressed using -fno-inline", fn);
3236
      inlinable = false;
3237
    }
3238
 
3239
  else if (!function_attribute_inlinable_p (fn))
3240
    {
3241
      if (do_warning)
3242
        warning (OPT_Winline, "function %q+F can never be inlined because it "
3243
                 "uses attributes conflicting with inlining", fn);
3244
      inlinable = false;
3245
    }
3246
 
3247
  else if (inline_forbidden_p (fn))
3248
    {
3249
      /* See if we should warn about uninlinable functions.  Previously,
3250
         some of these warnings would be issued while trying to expand
3251
         the function inline, but that would cause multiple warnings
3252
         about functions that would for example call alloca.  But since
3253
         this is a property of the function, just one warning is enough.
3254
         As a bonus we can now give more details about the reason why a
3255
         function is not inlinable.  */
3256
      if (always_inline)
3257
        error (inline_forbidden_reason, fn);
3258
      else if (do_warning)
3259
        warning (OPT_Winline, inline_forbidden_reason, fn);
3260
 
3261
      inlinable = false;
3262
    }
3263
 
3264
  /* Squirrel away the result so that we don't have to check again.  */
3265
  DECL_UNINLINABLE (fn) = !inlinable;
3266
 
3267
  return inlinable;
3268
}
3269
 
3270
/* Estimate the cost of a memory move.  Use machine-dependent
3271
   word size and take a possible memcpy call into account.  */
3272
 
3273
int
3274
estimate_move_cost (tree type)
3275
{
3276
  HOST_WIDE_INT size;
3277
 
3278
  gcc_assert (!VOID_TYPE_P (type));
3279
 
3280
  if (TREE_CODE (type) == VECTOR_TYPE)
3281
    {
3282
      enum machine_mode inner = TYPE_MODE (TREE_TYPE (type));
3283
      enum machine_mode simd
3284
        = targetm.vectorize.preferred_simd_mode (inner);
3285
      int simd_mode_size = GET_MODE_SIZE (simd);
3286
      return ((GET_MODE_SIZE (TYPE_MODE (type)) + simd_mode_size - 1)
3287
              / simd_mode_size);
3288
    }
3289
 
3290
  size = int_size_in_bytes (type);
3291
 
3292
  if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (!optimize_size))
3293
    /* Cost of a memcpy call, 3 arguments and the call.  */
3294
    return 4;
3295
  else
3296
    return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
3297
}
3298
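/* A worked example for the scalar path above, assuming a target
   where MOVE_MAX_PIECES is 8 and MOVE_RATIO permits piecewise moves:
   a 24-byte aggregate costs (24 + 8 - 1) / 8 == 3, while an
   aggregate too large to move piecewise is charged the flat memcpy
   cost of 4.  Both macros are target-dependent; the numbers are
   illustrative only.  */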
 
3299
/* Returns the cost of operation CODE, according to WEIGHTS.  */
3300
 
3301
static int
3302
estimate_operator_cost (enum tree_code code, eni_weights *weights,
3303
                        tree op1 ATTRIBUTE_UNUSED, tree op2)
3304
{
3305
  switch (code)
3306
    {
3307
    /* These are "free" conversions, or their presumed cost
3308
       is folded into other operations.  */
3309
    case RANGE_EXPR:
3310
    CASE_CONVERT:
3311
    case COMPLEX_EXPR:
3312
    case PAREN_EXPR:
3313
    case VIEW_CONVERT_EXPR:
3314
      return 0;
3315
 
3316
    /* Assign a cost of 1 to usual operations.
3317
       ??? We may consider mapping RTL costs to this.  */
3318
    case COND_EXPR:
3319
    case VEC_COND_EXPR:
3320
    case VEC_PERM_EXPR:
3321
 
3322
    case PLUS_EXPR:
3323
    case POINTER_PLUS_EXPR:
3324
    case MINUS_EXPR:
3325
    case MULT_EXPR:
3326
    case FMA_EXPR:
3327
 
3328
    case ADDR_SPACE_CONVERT_EXPR:
3329
    case FIXED_CONVERT_EXPR:
3330
    case FIX_TRUNC_EXPR:
3331
 
3332
    case NEGATE_EXPR:
3333
    case FLOAT_EXPR:
3334
    case MIN_EXPR:
3335
    case MAX_EXPR:
3336
    case ABS_EXPR:
3337
 
3338
    case LSHIFT_EXPR:
3339
    case RSHIFT_EXPR:
3340
    case LROTATE_EXPR:
3341
    case RROTATE_EXPR:
3342
    case VEC_LSHIFT_EXPR:
3343
    case VEC_RSHIFT_EXPR:
3344
 
3345
    case BIT_IOR_EXPR:
3346
    case BIT_XOR_EXPR:
3347
    case BIT_AND_EXPR:
3348
    case BIT_NOT_EXPR:
3349
 
3350
    case TRUTH_ANDIF_EXPR:
3351
    case TRUTH_ORIF_EXPR:
3352
    case TRUTH_AND_EXPR:
3353
    case TRUTH_OR_EXPR:
3354
    case TRUTH_XOR_EXPR:
3355
    case TRUTH_NOT_EXPR:
3356
 
3357
    case LT_EXPR:
3358
    case LE_EXPR:
3359
    case GT_EXPR:
3360
    case GE_EXPR:
3361
    case EQ_EXPR:
3362
    case NE_EXPR:
3363
    case ORDERED_EXPR:
3364
    case UNORDERED_EXPR:
3365
 
3366
    case UNLT_EXPR:
3367
    case UNLE_EXPR:
3368
    case UNGT_EXPR:
3369
    case UNGE_EXPR:
3370
    case UNEQ_EXPR:
3371
    case LTGT_EXPR:
3372
 
3373
    case CONJ_EXPR:
3374
 
3375
    case PREDECREMENT_EXPR:
3376
    case PREINCREMENT_EXPR:
3377
    case POSTDECREMENT_EXPR:
3378
    case POSTINCREMENT_EXPR:
3379
 
3380
    case REALIGN_LOAD_EXPR:
3381
 
3382
    case REDUC_MAX_EXPR:
3383
    case REDUC_MIN_EXPR:
3384
    case REDUC_PLUS_EXPR:
3385
    case WIDEN_SUM_EXPR:
3386
    case WIDEN_MULT_EXPR:
3387
    case DOT_PROD_EXPR:
3388
    case WIDEN_MULT_PLUS_EXPR:
3389
    case WIDEN_MULT_MINUS_EXPR:
3390
    case WIDEN_LSHIFT_EXPR:
3391
 
3392
    case VEC_WIDEN_MULT_HI_EXPR:
3393
    case VEC_WIDEN_MULT_LO_EXPR:
3394
    case VEC_UNPACK_HI_EXPR:
3395
    case VEC_UNPACK_LO_EXPR:
3396
    case VEC_UNPACK_FLOAT_HI_EXPR:
3397
    case VEC_UNPACK_FLOAT_LO_EXPR:
3398
    case VEC_PACK_TRUNC_EXPR:
3399
    case VEC_PACK_SAT_EXPR:
3400
    case VEC_PACK_FIX_TRUNC_EXPR:
3401
    case VEC_WIDEN_LSHIFT_HI_EXPR:
3402
    case VEC_WIDEN_LSHIFT_LO_EXPR:
3403
 
3404
      return 1;
3405
 
3406
    /* A few special cases of expensive operations.  This is useful
3407
       to avoid inlining functions having too many of these.  */
3408
    case TRUNC_DIV_EXPR:
3409
    case CEIL_DIV_EXPR:
3410
    case FLOOR_DIV_EXPR:
3411
    case ROUND_DIV_EXPR:
3412
    case EXACT_DIV_EXPR:
3413
    case TRUNC_MOD_EXPR:
3414
    case CEIL_MOD_EXPR:
3415
    case FLOOR_MOD_EXPR:
3416
    case ROUND_MOD_EXPR:
3417
    case RDIV_EXPR:
3418
      if (TREE_CODE (op2) != INTEGER_CST)
3419
        return weights->div_mod_cost;
3420
      return 1;
3421
 
3422
    default:
3423
      /* We expect a copy assignment with no operator.  */
3424
      gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3425
      return 0;
3426
    }
3427
}
3428
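/* For example, "x / y" with a non-constant divisor is charged
   weights->div_mod_cost above, while "x / 8" is charged 1 like any
   other simple operation, since division by a constant can usually
   be strength-reduced.  */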
 
3429
 
3430
/* Estimate number of instructions that will be created by expanding
3431
   the statements in the statement sequence STMTS.
3432
   WEIGHTS contains weights attributed to various constructs.  */
3433
 
3434
static
3435
int estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
3436
{
3437
  int cost;
3438
  gimple_stmt_iterator gsi;
3439
 
3440
  cost = 0;
3441
  for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
3442
    cost += estimate_num_insns (gsi_stmt (gsi), weights);
3443
 
3444
  return cost;
3445
}
3446
 
3447
 
3448
/* Estimate number of instructions that will be created by expanding STMT.
3449
   WEIGHTS contains weights attributed to various constructs.  */
3450
 
3451
int
3452
estimate_num_insns (gimple stmt, eni_weights *weights)
3453
{
3454
  unsigned cost, i;
3455
  enum gimple_code code = gimple_code (stmt);
3456
  tree lhs;
3457
  tree rhs;
3458
 
3459
  switch (code)
3460
    {
3461
    case GIMPLE_ASSIGN:
3462
      /* Try to estimate the cost of assignments.  We have two cases to
3463
         deal with:
3464
         1) Simple assignments to registers;
3465
         2) Stores to things that must live in memory.  This includes
3466
            "normal" stores to scalars, but also assignments of large
3467
            structures, or constructors of big arrays;
3468
 
3469
         Let us look at these two cases, assuming we have "a = b + C":
3470
         <GIMPLE_ASSIGN <var_decl "a">
3471
                <plus_expr <var_decl "b"> <constant C>>
3472
         If "a" is a GIMPLE register, the assignment to it is free on almost
3473
         any target, because "a" usually ends up in a real register.  Hence
3474
         the only cost of this expression comes from the PLUS_EXPR, and we
3475
         can ignore the GIMPLE_ASSIGN.
3476
         If "a" is not a GIMPLE register, the assignment to "a" will most
3477
         likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
3478
         of moving something into "a", which we compute using the function
3479
         estimate_move_cost.  */
3480
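      /* Thus a register-to-register "a = b + C" is charged only the
         operator cost of 1, while an aggregate copy "a = b" is
         charged estimate_move_cost for the store to "a" plus, when
         "b" is neither a register nor invariant, another move cost
         for loading "b".  */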
      if (gimple_clobber_p (stmt))
3481
        return 0;        /* ={v} {CLOBBER} stmt expands to nothing.  */
3482
 
3483
      lhs = gimple_assign_lhs (stmt);
3484
      rhs = gimple_assign_rhs1 (stmt);
3485
 
3486
      if (is_gimple_reg (lhs))
3487
        cost = 0;
3488
      else
3489
        cost = estimate_move_cost (TREE_TYPE (lhs));
3490
 
3491
      if (!is_gimple_reg (rhs) && !is_gimple_min_invariant (rhs))
3492
        cost += estimate_move_cost (TREE_TYPE (rhs));
3493
 
3494
      cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
3495
                                      gimple_assign_rhs1 (stmt),
3496
                                      get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
3497
                                      == GIMPLE_BINARY_RHS
3498
                                      ? gimple_assign_rhs2 (stmt) : NULL);
3499
      break;
3500
 
3501
    case GIMPLE_COND:
3502
      cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
3503
                                         gimple_op (stmt, 0),
3504
                                         gimple_op (stmt, 1));
3505
      break;
3506
 
3507
    case GIMPLE_SWITCH:
3508
      /* Take into account cost of the switch + guess 2 conditional jumps for
3509
         each case label.
3510
 
3511
         TODO: once the switch expansion logic is sufficiently separated, we can
3512
         do a better job of estimating the cost of the switch.  */
3513
      if (weights->time_based)
3514
        cost = floor_log2 (gimple_switch_num_labels (stmt)) * 2;
3515
      else
3516
        cost = gimple_switch_num_labels (stmt) * 2;
3517
      break;
3518
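      /* For example, a switch with 16 labels is charged
         floor_log2 (16) * 2 == 8 when estimating time (roughly a
         balanced decision tree) but 16 * 2 == 32 when estimating
         size.  */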
 
3519
    case GIMPLE_CALL:
3520
      {
3521
        tree decl = gimple_call_fndecl (stmt);
3522
        struct cgraph_node *node = NULL;
3523
 
3524
        /* Do not special case builtins where we see the body.
3525
           This just confuses the inliner.  */
3526
        if (!decl || !(node = cgraph_get_node (decl)) || node->analyzed)
3527
          ;
3528
        /* For builtins that are likely expanded to nothing or
3529
           inlined do not account operand costs.  */
3530
        else if (is_simple_builtin (decl))
3531
          return 0;
3532
        else if (is_inexpensive_builtin (decl))
3533
          return weights->target_builtin_call_cost;
3534
        else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
3535
          {
3536
            /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
3537
               specialize the cheap expansion we do here.
3538
               ???  This asks for a more general solution.  */
3539
            switch (DECL_FUNCTION_CODE (decl))
3540
              {
3541
                case BUILT_IN_POW:
3542
                case BUILT_IN_POWF:
3543
                case BUILT_IN_POWL:
3544
                  if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
3545
                      && REAL_VALUES_EQUAL
3546
                           (TREE_REAL_CST (gimple_call_arg (stmt, 1)), dconst2))
3547
                    return estimate_operator_cost (MULT_EXPR, weights,
3548
                                                   gimple_call_arg (stmt, 0),
3549
                                                   gimple_call_arg (stmt, 0));
3550
                  break;
3551
 
3552
                default:
3553
                  break;
3554
              }
3555
          }
3556
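        /* E.g. with -ffast-math "x * x" is canonicalized to
           "pow (x, 2.0)"; the case above prices such a call as the
           single multiplication it will be expanded back into.  */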
 
3557
        cost = node ? weights->call_cost : weights->indirect_call_cost;
3558
        if (gimple_call_lhs (stmt))
3559
          cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)));
3560
        for (i = 0; i < gimple_call_num_args (stmt); i++)
3561
          {
3562
            tree arg = gimple_call_arg (stmt, i);
3563
            cost += estimate_move_cost (TREE_TYPE (arg));
3564
          }
3565
        break;
3566
      }
3567
 
3568
    case GIMPLE_RETURN:
3569
      return weights->return_cost;
3570
 
3571
    case GIMPLE_GOTO:
3572
    case GIMPLE_LABEL:
3573
    case GIMPLE_NOP:
3574
    case GIMPLE_PHI:
3575
    case GIMPLE_PREDICT:
3576
    case GIMPLE_DEBUG:
3577
      return 0;
3578
 
3579
    case GIMPLE_ASM:
3580
      return asm_str_count (gimple_asm_string (stmt));
3581
 
3582
    case GIMPLE_RESX:
3583
      /* This is either going to be an external function call with one
3584
         argument, or two register copy statements plus a goto.  */
3585
      return 2;
3586
 
3587
    case GIMPLE_EH_DISPATCH:
3588
      /* ??? This is going to turn into a switch statement.  Ideally
3589
         we'd have a look at the eh region and estimate the number of
3590
         edges involved.  */
3591
      return 10;
3592
 
3593
    case GIMPLE_BIND:
3594
      return estimate_num_insns_seq (gimple_bind_body (stmt), weights);
3595
 
3596
    case GIMPLE_EH_FILTER:
3597
      return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
3598
 
3599
    case GIMPLE_CATCH:
3600
      return estimate_num_insns_seq (gimple_catch_handler (stmt), weights);
3601
 
3602
    case GIMPLE_TRY:
3603
      return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
3604
              + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
3605
 
3606
    /* OpenMP directives are generally very expensive.  */
3607
 
3608
    case GIMPLE_OMP_RETURN:
3609
    case GIMPLE_OMP_SECTIONS_SWITCH:
3610
    case GIMPLE_OMP_ATOMIC_STORE:
3611
    case GIMPLE_OMP_CONTINUE:
3612
      /* ...except these, which are cheap.  */
3613
      return 0;
3614
 
3615
    case GIMPLE_OMP_ATOMIC_LOAD:
3616
      return weights->omp_cost;
3617
 
3618
    case GIMPLE_OMP_FOR:
3619
      return (weights->omp_cost
3620
              + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
3621
              + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
3622
 
3623
    case GIMPLE_OMP_PARALLEL:
3624
    case GIMPLE_OMP_TASK:
3625
    case GIMPLE_OMP_CRITICAL:
3626
    case GIMPLE_OMP_MASTER:
3627
    case GIMPLE_OMP_ORDERED:
3628
    case GIMPLE_OMP_SECTION:
3629
    case GIMPLE_OMP_SECTIONS:
3630
    case GIMPLE_OMP_SINGLE:
3631
      return (weights->omp_cost
3632
              + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
3633
 
3634
    case GIMPLE_TRANSACTION:
3635
      return (weights->tm_cost
3636
              + estimate_num_insns_seq (gimple_transaction_body (stmt),
3637
                                        weights));
3638
 
3639
    default:
3640
      gcc_unreachable ();
3641
    }
3642
 
3643
  return cost;
3644
}
3645
 
3646
/* Estimate number of instructions that will be created by expanding
3647
   function FNDECL.  WEIGHTS contains weights attributed to various
3648
   constructs.  */
3649
 
3650
int
3651
estimate_num_insns_fn (tree fndecl, eni_weights *weights)
3652
{
3653
  struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
3654
  gimple_stmt_iterator bsi;
3655
  basic_block bb;
3656
  int n = 0;
3657
 
3658
  gcc_assert (my_function && my_function->cfg);
3659
  FOR_EACH_BB_FN (bb, my_function)
3660
    {
3661
      for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
3662
        n += estimate_num_insns (gsi_stmt (bsi), weights);
3663
    }
3664
 
3665
  return n;
3666
}
3667
 
3668
 
3669
/* Initializes weights used by estimate_num_insns.  */
3670
 
3671
void
3672
init_inline_once (void)
3673
{
3674
  eni_size_weights.call_cost = 1;
3675
  eni_size_weights.indirect_call_cost = 3;
3676
  eni_size_weights.target_builtin_call_cost = 1;
3677
  eni_size_weights.div_mod_cost = 1;
3678
  eni_size_weights.omp_cost = 40;
3679
  eni_size_weights.tm_cost = 10;
3680
  eni_size_weights.time_based = false;
3681
  eni_size_weights.return_cost = 1;
3682
 
3683
  /* Estimating time for a call is difficult, since we have no idea what the
3684
     called function does.  In the current uses of eni_time_weights,
3685
     underestimating the cost does less harm than overestimating it, so
3686
     we choose a rather small value here.  */
3687
  eni_time_weights.call_cost = 10;
3688
  eni_time_weights.indirect_call_cost = 15;
3689
  eni_time_weights.target_builtin_call_cost = 1;
3690
  eni_time_weights.div_mod_cost = 10;
3691
  eni_time_weights.omp_cost = 40;
3692
  eni_time_weights.tm_cost = 40;
3693
  eni_time_weights.time_based = true;
3694
  eni_time_weights.return_cost = 2;
3695
}
3696
 
3697
/* Estimate the number of instructions in a gimple_seq. */
3698
 
3699
int
3700
count_insns_seq (gimple_seq seq, eni_weights *weights)
3701
{
3702
  gimple_stmt_iterator gsi;
3703
  int n = 0;
3704
  for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
3705
    n += estimate_num_insns (gsi_stmt (gsi), weights);
3706
 
3707
  return n;
3708
}
3709
 
3710
 
3711
/* Install new lexical TREE_BLOCK underneath 'current_block'.  */
3712
 
3713
static void
3714
prepend_lexical_block (tree current_block, tree new_block)
3715
{
3716
  BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
3717
  BLOCK_SUBBLOCKS (current_block) = new_block;
3718
  BLOCK_SUPERCONTEXT (new_block) = current_block;
3719
}
3720
 
3721
/* Add local variables from CALLEE to CALLER.  */
3722
 
3723
static inline void
3724
add_local_variables (struct function *callee, struct function *caller,
3725
                     copy_body_data *id, bool check_var_ann)
3726
{
3727
  tree var;
3728
  unsigned ix;
3729
 
3730
  FOR_EACH_LOCAL_DECL (callee, ix, var)
3731
    if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
3732
      {
3733
        if (!check_var_ann
3734
            || (var_ann (var) && add_referenced_var (var)))
3735
          add_local_decl (caller, var);
3736
      }
3737
    else if (!can_be_nonlocal (var, id))
3738
      {
3739
        tree new_var = remap_decl (var, id);
3740
 
3741
        /* Remap debug-expressions.  */
3742
        if (TREE_CODE (new_var) == VAR_DECL
3743
            && DECL_DEBUG_EXPR_IS_FROM (new_var)
3744
            && new_var != var)
3745
          {
3746
            tree tem = DECL_DEBUG_EXPR (var);
3747
            bool old_regimplify = id->regimplify;
3748
            id->remapping_type_depth++;
3749
            walk_tree (&tem, copy_tree_body_r, id, NULL);
3750
            id->remapping_type_depth--;
3751
            id->regimplify = old_regimplify;
3752
            SET_DECL_DEBUG_EXPR (new_var, tem);
3753
          }
3754
        add_local_decl (caller, new_var);
3755
      }
3756
}
3757
 
3758
/* If STMT is a GIMPLE_CALL, replace it with its inline expansion.  */
3759
 
3760
static bool
3761
expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
3762
{
3763
  tree use_retvar;
3764
  tree fn;
3765
  struct pointer_map_t *st, *dst;
3766
  tree return_slot;
3767
  tree modify_dest;
3768
  location_t saved_location;
3769
  struct cgraph_edge *cg_edge;
3770
  cgraph_inline_failed_t reason;
3771
  basic_block return_block;
3772
  edge e;
3773
  gimple_stmt_iterator gsi, stmt_gsi;
3774
  bool successfully_inlined = FALSE;
3775
  bool purge_dead_abnormal_edges;
3776
 
3777
  /* Set input_location here so we get the right instantiation context
3778
     if we call instantiate_decl from inlinable_function_p.  */
3779
  saved_location = input_location;
3780
  if (gimple_has_location (stmt))
3781
    input_location = gimple_location (stmt);
3782
 
3783
  /* From here on, we're only interested in CALL_EXPRs.  */
3784
  if (gimple_code (stmt) != GIMPLE_CALL)
3785
    goto egress;
3786
 
3787
  cg_edge = cgraph_edge (id->dst_node, stmt);
3788
  gcc_checking_assert (cg_edge);
3789
  /* First, see if we can figure out what function is being called.
3790
     If we cannot, then there is no hope of inlining the function.  */
3791
  if (cg_edge->indirect_unknown_callee)
3792
    goto egress;
3793
  fn = cg_edge->callee->decl;
3794
  gcc_checking_assert (fn);
3795
 
3796
  /* If FN is a declaration of a function in a nested scope that was
3797
     globally declared inline, we don't set its DECL_INITIAL.
3798
     However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
3799
     C++ front-end uses it for cdtors to refer to their internal
3800
     declarations, which are not real functions.  Fortunately those
3801
     don't have trees to be saved, so we can tell by checking their
3802
     gimple_body.  */
3803
  if (!DECL_INITIAL (fn)
3804
      && DECL_ABSTRACT_ORIGIN (fn)
3805
      && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
3806
    fn = DECL_ABSTRACT_ORIGIN (fn);
3807
 
3808
  /* Don't try to inline functions that are not well-suited to inlining.  */
3809
  if (!cgraph_inline_p (cg_edge, &reason))
3810
    {
3811
      /* If this call was originally indirect, we do not want to emit any
3812
         inlining related warnings or sorry messages because there are no
3813
         guarantees regarding those.  */
3814
      if (cg_edge->indirect_inlining_edge)
3815
        goto egress;
3816
 
3817
      if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
3818
          /* Avoid warnings during early inline pass. */
3819
          && cgraph_global_info_ready
3820
          /* PR 20090218-1_0.c. Body can be provided by another module. */
3821
          && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
3822
        {
3823
          error ("inlining failed in call to always_inline %q+F: %s", fn,
3824
                 cgraph_inline_failed_string (reason));
3825
          error ("called from here");
3826
        }
3827
      else if (warn_inline
3828
               && DECL_DECLARED_INLINE_P (fn)
3829
               && !DECL_NO_INLINE_WARNING_P (fn)
3830
               && !DECL_IN_SYSTEM_HEADER (fn)
3831
               && reason != CIF_UNSPECIFIED
3832
               && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
3833
               /* Do not warn about not inlined recursive calls.  */
3834
               && !cgraph_edge_recursive_p (cg_edge)
3835
               /* Avoid warnings during early inline pass. */
3836
               && cgraph_global_info_ready)
3837
        {
3838
          warning (OPT_Winline, "inlining failed in call to %q+F: %s",
3839
                   fn, _(cgraph_inline_failed_string (reason)));
3840
          warning (OPT_Winline, "called from here");
3841
        }
3842
      goto egress;
3843
    }
3844
  fn = cg_edge->callee->decl;
3845
 
3846
#ifdef ENABLE_CHECKING
3847
  if (cg_edge->callee->decl != id->dst_node->decl)
3848
    verify_cgraph_node (cg_edge->callee);
3849
#endif
3850
 
3851
  /* We will be inlining this callee.  */
3852
  id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
3853
 
3854
  /* Update the callers EH personality.  */
3855
  if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))
3856
    DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
3857
      = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl);
3858
 
3859
  /* Split the block holding the GIMPLE_CALL.  */
3860
  e = split_block (bb, stmt);
3861
  bb = e->src;
3862
  return_block = e->dest;
3863
  remove_edge (e);
3864
 
3865
  /* split_block splits after the statement; work around this by
3866
     moving the call into the second block manually.  Not pretty,
3867
     but seems easier than doing the CFG manipulation by hand
3868
     when the GIMPLE_CALL is in the last statement of BB.  */
3869
  stmt_gsi = gsi_last_bb (bb);
3870
  gsi_remove (&stmt_gsi, false);
3871
 
3872
  /* If the GIMPLE_CALL was in the last statement of BB, it may have
3873
     been the source of abnormal edges.  In this case, schedule
3874
     the removal of dead abnormal edges.  */
3875
  gsi = gsi_start_bb (return_block);
3876
  if (gsi_end_p (gsi))
3877
    {
3878
      gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
3879
      purge_dead_abnormal_edges = true;
3880
    }
3881
  else
3882
    {
3883
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
3884
      purge_dead_abnormal_edges = false;
3885
    }
3886
 
3887
  stmt_gsi = gsi_start_bb (return_block);
3888
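  /* At this point the CFG looks roughly like

       BB:            <statements before the call>
       RETURN_BLOCK:  GIMPLE_CALL; <statements after the call>

     with the edge between the two removed; copy_body below splices
     the inlined body in between.  */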
 
3889
  /* Build a block containing code to initialize the arguments, the
3890
     actual inline expansion of the body, and a label for the return
3891
     statements within the function to jump to.  The type of the
3892
     statement expression is the return type of the function call.  */
3893
  id->block = make_node (BLOCK);
3894
  BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
3895
  BLOCK_SOURCE_LOCATION (id->block) = input_location;
3896
  prepend_lexical_block (gimple_block (stmt), id->block);
3897
 
3898
  /* Local declarations will be replaced by their equivalents in this
3899
     map.  */
3900
  st = id->decl_map;
3901
  id->decl_map = pointer_map_create ();
3902
  dst = id->debug_map;
3903
  id->debug_map = NULL;
3904
 
3905
  /* Record the function we are about to inline.  */
3906
  id->src_fn = fn;
3907
  id->src_node = cg_edge->callee;
3908
  id->src_cfun = DECL_STRUCT_FUNCTION (fn);
3909
  id->gimple_call = stmt;
3910
 
3911
  gcc_assert (!id->src_cfun->after_inlining);
3912
 
3913
  id->entry_bb = bb;
3914
  if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
3915
    {
3916
      gimple_stmt_iterator si = gsi_last_bb (bb);
3917
      gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
3918
                                                   NOT_TAKEN),
3919
                        GSI_NEW_STMT);
3920
    }
3921
  initialize_inlined_parameters (id, stmt, fn, bb);
3922
 
3923
  if (DECL_INITIAL (fn))
3924
    prepend_lexical_block (id->block, remap_blocks (DECL_INITIAL (fn), id));
3925
 
3926
  /* Return statements in the function body will be replaced by jumps
3927
     to the RET_LABEL.  */
3928
  gcc_assert (DECL_INITIAL (fn));
3929
  gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
3930
 
3931
  /* Find the LHS to which the result of this call is assigned.  */
3932
  return_slot = NULL;
3933
  if (gimple_call_lhs (stmt))
3934
    {
3935
      modify_dest = gimple_call_lhs (stmt);
3936
 
3937
      /* The function which we are inlining might not return a value,
3938
         in which case we should issue a warning that the function
3939
         does not return a value.  In that case the optimizers will
3940
         see that the variable to which the value is assigned was not
3941
         initialized.  We do not want to issue a warning about that
3942
         uninitialized variable.  */
3943
      if (DECL_P (modify_dest))
3944
        TREE_NO_WARNING (modify_dest) = 1;
3945
 
3946
      if (gimple_call_return_slot_opt_p (stmt))
3947
        {
3948
          return_slot = modify_dest;
3949
          modify_dest = NULL;
3950
        }
3951
    }
3952
  else
3953
    modify_dest = NULL;
3954
 
3955
  /* If we are inlining a call to the C++ operator new, we don't want
3956
     to use type based alias analysis on the return value.  Otherwise
3957
     we may get confused if the compiler sees that the inlined new
3958
     function returns a pointer which was just deleted.  See bug
3959
     33407.  */
3960
  if (DECL_IS_OPERATOR_NEW (fn))
3961
    {
3962
      return_slot = NULL;
3963
      modify_dest = NULL;
3964
    }
3965
 
3966
  /* Declare the return variable for the function.  */
3967
  use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);
3968
 
3969
  /* Add local vars in this inlined callee to caller.  */
3970
  add_local_variables (id->src_cfun, cfun, id, true);
3971
 
3972
  if (dump_file && (dump_flags & TDF_DETAILS))
3973
    {
3974
      fprintf (dump_file, "Inlining ");
3975
      print_generic_expr (dump_file, id->src_fn, 0);
3976
      fprintf (dump_file, " to ");
3977
      print_generic_expr (dump_file, id->dst_fn, 0);
3978
      fprintf (dump_file, " with frequency %i\n", cg_edge->frequency);
3979
    }
3980
 
3981
  /* This is it.  Duplicate the callee body.  Assume callee is
3982
     pre-gimplified.  Note that we must not alter the caller
3983
     function in any way before this point, as this CALL_EXPR may be
3984
     a self-referential call; if we're calling ourselves, we need to
3985
     duplicate our body before altering anything.  */
3986
  copy_body (id, bb->count,
3987
             cg_edge->frequency * REG_BR_PROB_BASE / CGRAPH_FREQ_BASE,
3988
             bb, return_block, NULL, NULL);
3989
 
3990
  /* Reset the escaped solution.  */
3991
  if (cfun->gimple_df)
3992
    pt_solution_reset (&cfun->gimple_df->escaped);
3993
 
3994
  /* Clean up.  */
3995
  if (id->debug_map)
3996
    {
3997
      pointer_map_destroy (id->debug_map);
3998
      id->debug_map = dst;
3999
    }
4000
  pointer_map_destroy (id->decl_map);
4001
  id->decl_map = st;
4002
 
4003
  /* Unlink the call's virtual operands before replacing it.  */
4004
  unlink_stmt_vdef (stmt);
4005
 
4006
  /* If the inlined function returns a result that we care about,
4007
     substitute the GIMPLE_CALL with an assignment of the return
4008
     variable to the LHS of the call.  That is, if STMT was
4009
     'a = foo (...)', substitute the call with 'a = USE_RETVAR'.  */
4010
  if (use_retvar && gimple_call_lhs (stmt))
4011
    {
4012
      gimple old_stmt = stmt;
4013
      stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
4014
      gsi_replace (&stmt_gsi, stmt, false);
4015
      if (gimple_in_ssa_p (cfun))
4016
        mark_symbols_for_renaming (stmt);
4017
      maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
4018
    }
4019
  else
4020
    {
4021
      /* Handle the case of inlining a function with no return
4022
         statement, which causes the return value to become undefined.  */
4023
      if (gimple_call_lhs (stmt)
4024
          && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
4025
        {
4026
          tree name = gimple_call_lhs (stmt);
4027
          tree var = SSA_NAME_VAR (name);
4028
          tree def = gimple_default_def (cfun, var);
4029
 
4030
          if (def)
4031
            {
4032
              /* If the variable is used undefined, make this name
4033
                 undefined via a move.  */
4034
              stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
4035
              gsi_replace (&stmt_gsi, stmt, true);
4036
            }
4037
          else
4038
            {
4039
              /* Otherwise make this variable undefined.  */
4040
              gsi_remove (&stmt_gsi, true);
4041
              set_default_def (var, name);
4042
              SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
4043
            }
4044
        }
4045
      else
4046
        gsi_remove (&stmt_gsi, true);
4047
    }
4048
 
4049
  if (purge_dead_abnormal_edges)
4050
    {
4051
      gimple_purge_dead_eh_edges (return_block);
4052
      gimple_purge_dead_abnormal_call_edges (return_block);
4053
    }
4054
 
4055
  /* If the value of the new expression is ignored, that's OK.  We
4056
     don't warn about this for CALL_EXPRs, so we shouldn't warn about
4057
     the equivalent inlined version either.  */
4058
  if (is_gimple_assign (stmt))
4059
    {
4060
      gcc_assert (gimple_assign_single_p (stmt)
4061
                  || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
4062
      TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
4063
    }
4064
 
4065
  /* Output the inlining info for this abstract function, since it has been
4066
     inlined.  If we don't do this now, we can lose the information about the
4067
     variables in the function when the blocks get blown away as soon as we
4068
     remove the cgraph node.  */
4069
  (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
4070
 
4071
  /* Update callgraph if needed.  */
4072
  cgraph_remove_node (cg_edge->callee);
4073
 
4074
  id->block = NULL_TREE;
4075
  successfully_inlined = TRUE;
4076
 
4077
 egress:
4078
  input_location = saved_location;
4079
  return successfully_inlined;
4080
}
4081
 
4082
/* Expand call statements reachable from STMT_P.
4083
   We can only have CALL_EXPRs as the "toplevel" tree code or nested
4084
   in a MODIFY_EXPR.  */
4085
 
4086
static bool
4087
gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
4088
{
4089
  gimple_stmt_iterator gsi;
4090
 
4091
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4092
    {
4093
      gimple stmt = gsi_stmt (gsi);
4094
 
4095
      if (is_gimple_call (stmt)
4096
          && expand_call_inline (bb, stmt, id))
4097
        return true;
4098
    }
4099
 
4100
  return false;
4101
}
4102
 
4103
 
4104
/* Walk all basic blocks created after FIRST and try to fold every statement
4105
   in the STATEMENTS pointer set.  */
4106
 
4107
static void
4108
fold_marked_statements (int first, struct pointer_set_t *statements)
4109
{
4110
  for (; first < n_basic_blocks; first++)
4111
    if (BASIC_BLOCK (first))
4112
      {
4113
        gimple_stmt_iterator gsi;
4114
 
4115
        for (gsi = gsi_start_bb (BASIC_BLOCK (first));
4116
             !gsi_end_p (gsi);
4117
             gsi_next (&gsi))
4118
          if (pointer_set_contains (statements, gsi_stmt (gsi)))
4119
            {
4120
              gimple old_stmt = gsi_stmt (gsi);
4121
              tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
4122
 
4123
              if (old_decl && DECL_BUILT_IN (old_decl))
4124
                {
4125
                  /* Folding builtins can create multiple instructions;
4126
                     we need to look at all of them.  */
4127
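                  /* E.g. a memcpy with a small constant length may
                     fold into one or more component assignments;
                     every statement between I2 and GSI below is new
                     and needs its call-graph edges and EH info
                     updated.  */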
                  gimple_stmt_iterator i2 = gsi;
4128
                  gsi_prev (&i2);
4129
                  if (fold_stmt (&gsi))
4130
                    {
4131
                      gimple new_stmt;
4132
                      /* If a builtin at the end of a bb folded into nothing,
4133
                         the following loop won't work.  */
4134
                      if (gsi_end_p (gsi))
4135
                        {
4136
                          cgraph_update_edges_for_call_stmt (old_stmt,
4137
                                                             old_decl, NULL);
4138
                          break;
4139
                        }
4140
                      if (gsi_end_p (i2))
4141
                        i2 = gsi_start_bb (BASIC_BLOCK (first));
4142
                      else
4143
                        gsi_next (&i2);
4144
                      while (1)
4145
                        {
4146
                          new_stmt = gsi_stmt (i2);
4147
                          update_stmt (new_stmt);
4148
                          cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4149
                                                             new_stmt);
4150
 
4151
                          if (new_stmt == gsi_stmt (gsi))
4152
                            {
4153
                              /* It is okay to check only for the very last
4154
                                 of these statements.  If it is a throwing
4155
                                 statement nothing will change.  If it isn't,
4156
                                 this can remove EH edges.  That would only be
4157
                                 wrong if some intermediate statements could
4158
                                 throw but the last one couldn't.  That would
4159
                                 mean we'd have to split the block, which we
4160
                                 can't do here and we'd lose anyway.  And as
4161
                                 builtins probably never throw, this is all
4162
                                 moot anyway.  */
4163
                              if (maybe_clean_or_replace_eh_stmt (old_stmt,
4164
                                                                  new_stmt))
4165
                                gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
4166
                              break;
4167
                            }
4168
                          gsi_next (&i2);
4169
                        }
4170
                    }
4171
                }
4172
              else if (fold_stmt (&gsi))
4173
                {
4174
                  /* Re-read the statement from GSI as fold_stmt() may
4175
                     have changed it.  */
4176
                  gimple new_stmt = gsi_stmt (gsi);
4177
                  update_stmt (new_stmt);
4178
 
4179
                  if (is_gimple_call (old_stmt)
4180
                      || is_gimple_call (new_stmt))
4181
                    cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4182
                                                       new_stmt);
4183
 
4184
                  if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
4185
                    gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
4186
                }
4187
            }
4188
      }
4189
}
4190
 
4191
/* Return true if BB has at least one abnormal outgoing edge.  */
4192
 
4193
static inline bool
4194
has_abnormal_outgoing_edge_p (basic_block bb)
4195
{
4196
  edge e;
4197
  edge_iterator ei;
4198
 
4199
  FOR_EACH_EDGE (e, ei, bb->succs)
4200
    if (e->flags & EDGE_ABNORMAL)
4201
      return true;
4202
 
4203
  return false;
4204
}
4205
 
4206
/* Expand calls to inline functions in the body of FN.  */
4207
 
4208
unsigned int
4209
optimize_inline_calls (tree fn)
4210
{
4211
  copy_body_data id;
4212
  basic_block bb;
4213
  int last = n_basic_blocks;
4214
  struct gimplify_ctx gctx;
4215
  bool inlined_p = false;
4216
 
4217
  /* Clear out ID.  */
4218
  memset (&id, 0, sizeof (id));
4219
 
4220
  id.src_node = id.dst_node = cgraph_get_node (fn);
4221
  gcc_assert (id.dst_node->analyzed);
4222
  id.dst_fn = fn;
4223
  /* Or any functions that aren't finished yet.  */
4224
  if (current_function_decl)
4225
    id.dst_fn = current_function_decl;
4226
 
4227
  id.copy_decl = copy_decl_maybe_to_var;
4228
  id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4229
  id.transform_new_cfg = false;
4230
  id.transform_return_to_modify = true;
4231
  id.transform_lang_insert_block = NULL;
4232
  id.statements_to_fold = pointer_set_create ();
4233
 
4234
  push_gimplify_context (&gctx);
4235
 
4236
  /* We make no attempts to keep dominance info up-to-date.  */
4237
  free_dominance_info (CDI_DOMINATORS);
4238
  free_dominance_info (CDI_POST_DOMINATORS);
4239
 
4240
  /* Register specific gimple functions.  */
4241
  gimple_register_cfg_hooks ();
4242
 
4243
  /* Reach the trees by walking over the CFG, and note the
4244
     enclosing basic-blocks in the call edges.  */
4245
  /* We walk the blocks going forward, because inlined function bodies
4246
     will split id->current_basic_block, and the new blocks will
4247
     follow it; we'll trudge through them, processing their CALL_EXPRs
4248
     along the way.  */
4249
  FOR_EACH_BB (bb)
4250
    inlined_p |= gimple_expand_calls_inline (bb, &id);
4251
 
4252
  pop_gimplify_context (NULL);
4253
 
4254
#ifdef ENABLE_CHECKING
4255
    {
4256
      struct cgraph_edge *e;
4257
 
4258
      verify_cgraph_node (id.dst_node);
4259
 
4260
      /* Double check that we inlined everything we are supposed to inline.  */
4261
      for (e = id.dst_node->callees; e; e = e->next_callee)
4262
        gcc_assert (e->inline_failed);
4263
    }
4264
#endif
4265
 
4266
  /* Fold queued statements.  */
4267
  fold_marked_statements (last, id.statements_to_fold);
4268
  pointer_set_destroy (id.statements_to_fold);
4269
 
4270
  gcc_assert (!id.debug_stmts);
4271
 
4272
  /* If we didn't inline into the function there is nothing to do.  */
4273
  if (!inlined_p)
4274
    return 0;
4275
 
4276
  /* Renumber the lexical scoping (non-code) blocks consecutively.  */
4277
  number_blocks (fn);
4278
 
4279
  delete_unreachable_blocks_update_callgraph (&id);
4280
#ifdef ENABLE_CHECKING
4281
  verify_cgraph_node (id.dst_node);
4282
#endif
4283
 
4284
  /* It would be nice to check SSA/CFG/statement consistency here, but it is
4285
     not possible yet - the IPA passes might make various functions no longer
4286
     throw, and they don't care to proactively update local EH info.  This is
4287
     done later in the fixup_cfg pass, which also executes the verification.  */
4288
  return (TODO_update_ssa
4289
          | TODO_cleanup_cfg
4290
          | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
4291
          | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
4292
          | (profile_status != PROFILE_ABSENT ? TODO_rebuild_frequencies : 0));
4293
}
4294
 
4295
/* Passed to walk_tree.  Copies the node pointed to, if appropriate.  */

tree
copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  enum tree_code code = TREE_CODE (*tp);
  enum tree_code_class cl = TREE_CODE_CLASS (code);

  /* We make copies of most nodes.  */
  if (IS_EXPR_CODE_CLASS (cl)
      || code == TREE_LIST
      || code == TREE_VEC
      || code == TYPE_DECL
      || code == OMP_CLAUSE)
    {
      /* Because the chain gets clobbered when we make a copy, we save it
         here.  */
      tree chain = NULL_TREE, new_tree;

      if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
        chain = TREE_CHAIN (*tp);

      /* Copy the node.  */
      new_tree = copy_node (*tp);

      /* Propagate mudflap marked-ness.  */
      if (flag_mudflap && mf_marked_p (*tp))
        mf_mark (new_tree);

      *tp = new_tree;

      /* Now, restore the chain, if appropriate.  That will cause
         walk_tree to walk into the chain as well.  */
      if (code == PARM_DECL
          || code == TREE_LIST
          || code == OMP_CLAUSE)
        TREE_CHAIN (*tp) = chain;

      /* For now, we don't update BLOCKs when we make copies.  So, we
         have to nullify all BIND_EXPRs.  */
      if (TREE_CODE (*tp) == BIND_EXPR)
        BIND_EXPR_BLOCK (*tp) = NULL_TREE;
    }
  else if (code == CONSTRUCTOR)
    {
      /* CONSTRUCTOR nodes need special handling because
         we need to duplicate the vector of elements.  */
      tree new_tree;

      new_tree = copy_node (*tp);

      /* Propagate mudflap marked-ness.  */
      if (flag_mudflap && mf_marked_p (*tp))
        mf_mark (new_tree);

      CONSTRUCTOR_ELTS (new_tree) = VEC_copy (constructor_elt, gc,
                                         CONSTRUCTOR_ELTS (*tp));
      *tp = new_tree;
    }
  else if (code == STATEMENT_LIST)
    /* We used to just abort on STATEMENT_LIST, but we can run into them
       with statement-expressions (c++/40975).  */
    copy_statement_list (tp);
  else if (TREE_CODE_CLASS (code) == tcc_type)
    *walk_subtrees = 0;
  else if (TREE_CODE_CLASS (code) == tcc_declaration)
    *walk_subtrees = 0;
  else if (TREE_CODE_CLASS (code) == tcc_constant)
    *walk_subtrees = 0;
  return NULL_TREE;
}

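/* Illustrative sketch (not part of the original source): copy_tree_r is
   normally driven by walk_tree, which then recurses into each freshly
   copied node.  A caller wanting a private copy of a shared GENERIC tree
   could write, roughly:

       tree copy = expr;
       walk_tree (&copy, copy_tree_r, NULL, NULL);

   where EXPR is a hypothetical variable holding the tree to duplicate.  */
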
/* The SAVE_EXPR pointed to by TP is being copied.  If ST contains
   information indicating to what new SAVE_EXPR this one should be mapped,
   use that one.  Otherwise, create a new node and enter it in ST.  */

static void
remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
{
  struct pointer_map_t *st = (struct pointer_map_t *) st_;
  tree *n;
  tree t;

  /* See if we already encountered this SAVE_EXPR.  */
  n = (tree *) pointer_map_contains (st, *tp);

  /* If we didn't already remap this SAVE_EXPR, do so now.  */
  if (!n)
    {
      t = copy_node (*tp);

      /* Remember this SAVE_EXPR.  */
      *pointer_map_insert (st, *tp) = t;
      /* Make sure we don't remap an already-remapped SAVE_EXPR.  */
      *pointer_map_insert (st, t) = t;
    }
  else
    {
      /* We've already walked into this SAVE_EXPR; don't do it again.  */
      *walk_subtrees = 0;
      t = *n;
    }

  /* Replace this SAVE_EXPR with the copy.  */
  *tp = t;
}

/* Called via walk_tree.  If *TP points to a LABEL_EXPR for a local label,
   copies the label's declaration and enters it in the decl map in DATA
   (which is really a `copy_body_data *').  */

static tree
mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
                        void *data)
{
  copy_body_data *id = (copy_body_data *) data;

  /* Don't walk into types.  */
  if (TYPE_P (*tp))
    *walk_subtrees = 0;

  else if (TREE_CODE (*tp) == LABEL_EXPR)
    {
      tree decl = TREE_OPERAND (*tp, 0);

      /* Copy the decl and remember the copy.  */
      insert_decl_map (id, decl, id->copy_decl (decl, id));
    }

  return NULL_TREE;
}

/* Perform any modifications to EXPR required when it is unsaved.  Does
   not recurse into EXPR's subtrees.  */

static void
unsave_expr_1 (tree expr)
{
  switch (TREE_CODE (expr))
    {
    case TARGET_EXPR:
      /* Don't mess with a TARGET_EXPR that hasn't been expanded.
         It's OK for this to happen if it was part of a subtree that
         isn't immediately expanded, such as operand 2 of another
         TARGET_EXPR.  */
      if (TREE_OPERAND (expr, 1))
        break;

      TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
      TREE_OPERAND (expr, 3) = NULL_TREE;
      break;

    default:
      break;
    }
}

/* Called via walk_tree when an expression is unsaved.  Using the decl
   map in the `copy_body_data *' passed in DATA, remaps all local
   declarations to appropriate replacements.  */

static tree
unsave_r (tree *tp, int *walk_subtrees, void *data)
{
  copy_body_data *id = (copy_body_data *) data;
  struct pointer_map_t *st = id->decl_map;
  tree *n;

  /* Only a local declaration (variable or label).  */
  if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
      || TREE_CODE (*tp) == LABEL_DECL)
    {
      /* Look up the declaration.  */
      n = (tree *) pointer_map_contains (st, *tp);

      /* If it's there, remap it.  */
      if (n)
        *tp = *n;
    }

  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  else if (TREE_CODE (*tp) == SAVE_EXPR
           || TREE_CODE (*tp) == TARGET_EXPR)
    remap_save_expr (tp, st, walk_subtrees);
  else
    {
      copy_tree_r (tp, walk_subtrees, NULL);

      /* Do whatever unsaving is required.  */
      unsave_expr_1 (*tp);
    }

  /* Keep iterating.  */
  return NULL_TREE;
}

/* Copies everything in EXPR and replaces variables, labels
   and SAVE_EXPRs local to EXPR.  */

tree
unsave_expr_now (tree expr)
{
  copy_body_data id;

  /* There's nothing to do for NULL_TREE.  */
  if (expr == 0)
    return expr;

  /* Set up ID.  */
  memset (&id, 0, sizeof (id));
  id.src_fn = current_function_decl;
  id.dst_fn = current_function_decl;
  id.decl_map = pointer_map_create ();
  id.debug_map = NULL;

  id.copy_decl = copy_decl_no_change;
  id.transform_call_graph_edges = CB_CGE_DUPLICATE;
  id.transform_new_cfg = false;
  id.transform_return_to_modify = false;
  id.transform_lang_insert_block = NULL;

  /* Walk the tree once to find local labels.  */
  walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);

  /* Walk the tree again, copying, remapping, and unsaving.  */
  walk_tree (&expr, unsave_r, &id, NULL);

  /* Clean up.  */
  pointer_map_destroy (id.decl_map);
  if (id.debug_map)
    pointer_map_destroy (id.debug_map);

  return expr;
}

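/* Hypothetical usage of the above (illustration only):

       tree copy = unsave_expr_now (expr);

   yields a duplicate of EXPR in which local variables, labels and
   SAVE_EXPRs have been remapped to fresh nodes; the caller's original
   EXPR is left untouched.  */
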
/* Called via walk_gimple_seq.  If *GSIP points to a GIMPLE_LABEL for a local
   label, copies the declaration and enters it in the decl map carried in
   WI->info (which is really a `copy_body_data *').  */

static tree
mark_local_labels_stmt (gimple_stmt_iterator *gsip,
                        bool *handled_ops_p ATTRIBUTE_UNUSED,
                        struct walk_stmt_info *wi)
{
  copy_body_data *id = (copy_body_data *) wi->info;
  gimple stmt = gsi_stmt (*gsip);

  if (gimple_code (stmt) == GIMPLE_LABEL)
    {
      tree decl = gimple_label_label (stmt);

      /* Copy the decl and remember the copy.  */
      insert_decl_map (id, decl, id->copy_decl (decl, id));
    }

  return NULL_TREE;
}


/* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
   Using the decl map in the `copy_body_data *' carried in WI->info,
   remaps all local declarations to appropriate replacements in gimple
   operands.  */

static tree
replace_locals_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  copy_body_data *id = (copy_body_data *) wi->info;
  struct pointer_map_t *st = id->decl_map;
  tree *n;
  tree expr = *tp;

  /* Only a local declaration (variable or label).  */
  if ((TREE_CODE (expr) == VAR_DECL
       && !TREE_STATIC (expr))
      || TREE_CODE (expr) == LABEL_DECL)
    {
      /* Look up the declaration.  */
      n = (tree *) pointer_map_contains (st, expr);

      /* If it's there, remap it.  */
      if (n)
        *tp = *n;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (expr) == STATEMENT_LIST
           || TREE_CODE (expr) == BIND_EXPR
           || TREE_CODE (expr) == SAVE_EXPR)
    gcc_unreachable ();
  else if (TREE_CODE (expr) == TARGET_EXPR)
    {
      /* Don't mess with a TARGET_EXPR that hasn't been expanded.
         It's OK for this to happen if it was part of a subtree that
         isn't immediately expanded, such as operand 2 of another
         TARGET_EXPR.  */
      if (!TREE_OPERAND (expr, 1))
        {
          TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
          TREE_OPERAND (expr, 3) = NULL_TREE;
        }
    }

  /* Keep iterating.  */
  return NULL_TREE;
}


/* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
   Using the decl map in the `copy_body_data *' carried in WI->info,
   remaps all local declarations to appropriate replacements in gimple
   statements.  */

static tree
replace_locals_stmt (gimple_stmt_iterator *gsip,
                     bool *handled_ops_p ATTRIBUTE_UNUSED,
                     struct walk_stmt_info *wi)
{
  copy_body_data *id = (copy_body_data *) wi->info;
  gimple stmt = gsi_stmt (*gsip);

  if (gimple_code (stmt) == GIMPLE_BIND)
    {
      tree block = gimple_bind_block (stmt);

      if (block)
        {
          remap_block (&block, id);
          gimple_bind_set_block (stmt, block);
        }

      /* This will remap a lot of the same decls again, but this should be
         harmless.  */
      if (gimple_bind_vars (stmt))
        gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt), NULL, id));
    }

  /* Keep iterating.  */
  return NULL_TREE;
}


/* Copies everything in SEQ and replaces variables and labels local to
   current_function_decl.  */

gimple_seq
copy_gimple_seq_and_replace_locals (gimple_seq seq)
{
  copy_body_data id;
  struct walk_stmt_info wi;
  struct pointer_set_t *visited;
  gimple_seq copy;

  /* There's nothing to do for a NULL sequence.  */
  if (seq == NULL)
    return seq;

  /* Set up ID.  */
  memset (&id, 0, sizeof (id));
  id.src_fn = current_function_decl;
  id.dst_fn = current_function_decl;
  id.decl_map = pointer_map_create ();
  id.debug_map = NULL;

  id.copy_decl = copy_decl_no_change;
  id.transform_call_graph_edges = CB_CGE_DUPLICATE;
  id.transform_new_cfg = false;
  id.transform_return_to_modify = false;
  id.transform_lang_insert_block = NULL;

  /* Walk the sequence once to find local labels.  */
  memset (&wi, 0, sizeof (wi));
  visited = pointer_set_create ();
  wi.info = &id;
  wi.pset = visited;
  walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
  pointer_set_destroy (visited);

  copy = gimple_seq_copy (seq);

  /* Walk the copy, remapping decls.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = &id;
  walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);

  /* Clean up.  */
  pointer_map_destroy (id.decl_map);
  if (id.debug_map)
    pointer_map_destroy (id.debug_map);

  return copy;
}


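/* A usage sketch (assumed, not from this file): a pass that needs a
   second, independent copy of a statement sequence SEQ within the same
   function could write

       gimple_seq dup = copy_gimple_seq_and_replace_locals (seq);

   and receive a copy in which every block-local variable and label is a
   fresh declaration, so the two sequences can be emitted side by side.  */
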
/* Allow someone to determine if SEARCH is a child of TOP from gdb.  */

static tree
debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
{
  if (*tp == data)
    return (tree) data;
  else
    return NULL;
}

DEBUG_FUNCTION bool
debug_find_tree (tree top, tree search)
{
  return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
}


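/* For illustration: from a debugger one might check whether one tree
   occurs inside another with, e.g.,

       (gdb) call debug_find_tree (top, search)

   where TOP and SEARCH are hypothetical convenience variables holding
   tree nodes.  */
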
/* Declare the variables created by the inliner.  Add all the variables in
   VARS to BLOCK.  */

static void
declare_inline_vars (tree block, tree vars)
{
  tree t;
  for (t = vars; t; t = DECL_CHAIN (t))
    {
      DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
      gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
      add_local_decl (cfun, t);
    }

  if (block)
    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
}

/* Finish up COPY, a copy of the decl DECL.  The DECL originally lived in
   ID->src_fn, but the copy will live in ID->dst_fn.  */

static tree
copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
{
  /* Don't generate debug information for the copy if we wouldn't have
     generated it for the original either.  */
  DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
  DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);

  /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
     declaration inspired this copy.  */
  DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);

  /* The new variable/label has no RTL, yet.  */
  if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
      && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
    SET_DECL_RTL (copy, 0);

  /* These args would always appear unused, if not for this.  */
  TREE_USED (copy) = 1;

  /* Set the context for the new declaration.  */
  if (!DECL_CONTEXT (decl))
    /* Globals stay global.  */
    ;
  else if (DECL_CONTEXT (decl) != id->src_fn)
    /* Things that weren't in the scope of the function we're inlining
       from aren't in the scope we're inlining to, either.  */
    ;
  else if (TREE_STATIC (decl))
    /* Function-scoped static variables should stay in the original
       function.  */
    ;
  else
    /* Ordinary automatic local variables are now in the scope of the
       new function.  */
    DECL_CONTEXT (copy) = id->dst_fn;

  if (TREE_CODE (decl) == VAR_DECL
      /* C++ clones functions during parsing, before
         referenced_vars.  */
      && gimple_referenced_vars (DECL_STRUCT_FUNCTION (id->src_fn))
      && referenced_var_lookup (DECL_STRUCT_FUNCTION (id->src_fn),
                                DECL_UID (decl)))
    add_referenced_var (copy);

  return copy;
}

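/* Copy DECL, which must be a PARM_DECL or a RESULT_DECL, into a new
   VAR_DECL for use in ID->dst_fn.  */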
static tree
copy_decl_to_var (tree decl, copy_body_data *id)
{
  tree copy, type;

  gcc_assert (TREE_CODE (decl) == PARM_DECL
              || TREE_CODE (decl) == RESULT_DECL);

  type = TREE_TYPE (decl);

  copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
                     VAR_DECL, DECL_NAME (decl), type);
  if (DECL_PT_UID_SET_P (decl))
    SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
  TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
  TREE_READONLY (copy) = TREE_READONLY (decl);
  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
  DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);

  return copy_decl_for_dup_finish (id, decl, copy);
}

/* Like copy_decl_to_var, but create a return slot object instead of a
   pointer variable for return by invisible reference.  */

static tree
copy_result_decl_to_var (tree decl, copy_body_data *id)
{
  tree copy, type;

  gcc_assert (TREE_CODE (decl) == PARM_DECL
              || TREE_CODE (decl) == RESULT_DECL);

  type = TREE_TYPE (decl);
  if (DECL_BY_REFERENCE (decl))
    type = TREE_TYPE (type);

  copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
                     VAR_DECL, DECL_NAME (decl), type);
  if (DECL_PT_UID_SET_P (decl))
    SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
  TREE_READONLY (copy) = TREE_READONLY (decl);
  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
  if (!DECL_BY_REFERENCE (decl))
    {
      TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
      DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
    }

  return copy_decl_for_dup_finish (id, decl, copy);
}

tree
copy_decl_no_change (tree decl, copy_body_data *id)
{
  tree copy;

  copy = copy_node (decl);

  /* The COPY is not abstract; it will be generated in DST_FN.  */
  DECL_ABSTRACT (copy) = 0;
  lang_hooks.dup_lang_specific_decl (copy);

  /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
     been taken; it's for internal bookkeeping in expand_goto_internal.  */
  if (TREE_CODE (copy) == LABEL_DECL)
    {
      TREE_ADDRESSABLE (copy) = 0;
      LABEL_DECL_UID (copy) = -1;
    }

  return copy_decl_for_dup_finish (id, decl, copy);
}

static tree
copy_decl_maybe_to_var (tree decl, copy_body_data *id)
{
  if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
    return copy_decl_to_var (decl, id);
  else
    return copy_decl_no_change (decl, id);
}

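/* The copy_decl_* functions above serve as the copy_decl callback of a
   copy_body_data, so the tree copiers can be parameterized by how
   declarations are duplicated.  A minimal sketch of installing such a
   callback (assuming a locally initialized ID):

       copy_body_data id;
       memset (&id, 0, sizeof (id));
       id.copy_decl = copy_decl_no_change;

   Inlining installs copy_decl_maybe_to_var so PARM_DECLs and
   RESULT_DECLs become VAR_DECLs in the caller, while cloning and
   unsaving keep declarations unchanged via copy_decl_no_change.  */
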
/* Return a copy of the function's argument tree.  */
static tree
copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
                               bitmap args_to_skip, tree *vars)
{
  tree arg, *parg;
  tree new_parm = NULL;
  int i = 0;

  parg = &new_parm;

  for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
    if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
      {
        tree new_tree = remap_decl (arg, id);
        if (TREE_CODE (new_tree) != PARM_DECL)
          new_tree = id->copy_decl (arg, id);
        lang_hooks.dup_lang_specific_decl (new_tree);
        *parg = new_tree;
        parg = &DECL_CHAIN (new_tree);
      }
    else if (!pointer_map_contains (id->decl_map, arg))
      {
        /* Make an equivalent VAR_DECL.  If the argument was used
           as a temporary variable later in the function, the uses will be
           replaced by the local variable.  */
        tree var = copy_decl_to_var (arg, id);
        add_referenced_var (var);
        insert_decl_map (id, arg, var);
        /* Declare this new variable.  */
        DECL_CHAIN (var) = *vars;
        *vars = var;
      }
  return new_parm;
}

/* Return a copy of the function's static chain.  */
static tree
copy_static_chain (tree static_chain, copy_body_data * id)
{
  tree *chain_copy, *pvar;

  chain_copy = &static_chain;
  for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
    {
      tree new_tree = remap_decl (*pvar, id);
      lang_hooks.dup_lang_specific_decl (new_tree);
      DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
      *pvar = new_tree;
    }
  return static_chain;
}

/* Return true if the function is allowed to be versioned.
   This is a guard for the versioning functionality.  */

bool
tree_versionable_function_p (tree fndecl)
{
  return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
          && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl), fndecl) == NULL);
}

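/* For illustration: versioning of a specific function can be vetoed in
   user code with the "noclone" attribute, e.g.

       int f (int) __attribute__ ((noclone));

   which makes tree_versionable_function_p return false for F over and
   above the checks done by copy_forbidden.  */
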
/* Delete all unreachable basic blocks and update the callgraph.
   Doing so is somewhat nontrivial because we need to update all clones and
   remove inlined functions that become unreachable.  */

static bool
delete_unreachable_blocks_update_callgraph (copy_body_data *id)
{
  bool changed = false;
  basic_block b, next_bb;

  find_unreachable_blocks ();

  /* Delete all unreachable basic blocks.  */

  for (b = ENTRY_BLOCK_PTR->next_bb; b != EXIT_BLOCK_PTR; b = next_bb)
    {
      next_bb = b->next_bb;

      if (!(b->flags & BB_REACHABLE))
        {
          gimple_stmt_iterator bsi;

          for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
            if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL)
              {
                struct cgraph_edge *e;
                struct cgraph_node *node;

                if ((e = cgraph_edge (id->dst_node, gsi_stmt (bsi))) != NULL)
                  {
                    if (!e->inline_failed)
                      cgraph_remove_node_and_inline_clones (e->callee);
                    else
                      cgraph_remove_edge (e);
                  }
                if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
                    && id->dst_node->clones)
                  for (node = id->dst_node->clones; node != id->dst_node;)
                    {
                      if ((e = cgraph_edge (node, gsi_stmt (bsi))) != NULL)
                        {
                          if (!e->inline_failed)
                            cgraph_remove_node_and_inline_clones (e->callee);
                          else
                            cgraph_remove_edge (e);
                        }

                      if (node->clones)
                        node = node->clones;
                      else if (node->next_sibling_clone)
                        node = node->next_sibling_clone;
                      else
                        {
                          while (node != id->dst_node && !node->next_sibling_clone)
                            node = node->clone_of;
                          if (node != id->dst_node)
                            node = node->next_sibling_clone;
                        }
                    }
              }
          delete_basic_block (b);
          changed = true;
        }
    }

  return changed;
}

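/* Note: both delete_unreachable_blocks_update_callgraph above and
   update_clone_info below visit every clone of ID->dst_node with the
   same preorder walk of the clone tree: descend into node->clones
   first, then advance along node->next_sibling_clone, and climb back
   through node->clone_of once a subtree is exhausted.  */
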
/* Update clone info after duplication.  */

static void
update_clone_info (copy_body_data * id)
{
  struct cgraph_node *node;
  if (!id->dst_node->clones)
    return;
  for (node = id->dst_node->clones; node != id->dst_node;)
    {
      /* First update replace maps to match the new body.  */
      if (node->clone.tree_map)
        {
          unsigned int i;
          for (i = 0; i < VEC_length (ipa_replace_map_p, node->clone.tree_map); i++)
            {
              struct ipa_replace_map *replace_info;
              replace_info = VEC_index (ipa_replace_map_p, node->clone.tree_map, i);
              walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
              walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
            }
        }
      if (node->clones)
        node = node->clones;
      else if (node->next_sibling_clone)
        node = node->next_sibling_clone;
      else
        {
          while (node != id->dst_node && !node->next_sibling_clone)
            node = node->clone_of;
          if (node != id->dst_node)
            node = node->next_sibling_clone;
        }
    }
}

/* Create a copy of a function's tree.
   OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
   of the original function and the new copied function
   respectively.  In case we want to replace a DECL
   tree with another tree while duplicating the function's
   body, TREE_MAP represents the mapping between these
   trees.  If UPDATE_CLONES is set, the call_stmt fields
   of edges of clones of the function will be updated.

   If non-NULL, ARGS_TO_SKIP determines the function parameters
   to remove from the new version.
   If SKIP_RETURN is true, the new version will return void.
   If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
   If non-NULL, NEW_ENTRY determines the new entry BB of the clone.  */

void
tree_function_versioning (tree old_decl, tree new_decl,
                          VEC(ipa_replace_map_p,gc)* tree_map,
                          bool update_clones, bitmap args_to_skip,
                          bool skip_return, bitmap blocks_to_copy,
                          basic_block new_entry)
{
  struct cgraph_node *old_version_node;
  struct cgraph_node *new_version_node;
  copy_body_data id;
  tree p;
  unsigned i;
  struct ipa_replace_map *replace_info;
  basic_block old_entry_block, bb;
  VEC (gimple, heap) *init_stmts = VEC_alloc (gimple, heap, 10);

  tree old_current_function_decl = current_function_decl;
  tree vars = NULL_TREE;

  gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
              && TREE_CODE (new_decl) == FUNCTION_DECL);
  DECL_POSSIBLY_INLINED (old_decl) = 1;

  old_version_node = cgraph_get_node (old_decl);
  gcc_checking_assert (old_version_node);
  new_version_node = cgraph_get_node (new_decl);
  gcc_checking_assert (new_version_node);

  /* Copy over debug args.  */
  if (DECL_HAS_DEBUG_ARGS_P (old_decl))
    {
      VEC(tree, gc) **new_debug_args, **old_debug_args;
      gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
      DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
      old_debug_args = decl_debug_args_lookup (old_decl);
      if (old_debug_args)
        {
          new_debug_args = decl_debug_args_insert (new_decl);
          *new_debug_args = VEC_copy (tree, gc, *old_debug_args);
        }
    }

  /* Output the inlining info for this abstract function, since it has been
     inlined.  If we don't do this now, we can lose the information about the
     variables in the function when the blocks get blown away as soon as we
     remove the cgraph node.  */
  (*debug_hooks->outlining_inline_function) (old_decl);

  DECL_ARTIFICIAL (new_decl) = 1;
  DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
  DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);

  /* Prepare the data structures for the tree copy.  */
  memset (&id, 0, sizeof (id));

  id.statements_to_fold = pointer_set_create ();
  id.decl_map = pointer_map_create ();
  id.debug_map = NULL;
  id.src_fn = old_decl;
  id.dst_fn = new_decl;
  id.src_node = old_version_node;
  id.dst_node = new_version_node;
  id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
  if (id.src_node->ipa_transforms_to_apply)
    {
      VEC(ipa_opt_pass,heap) * old_transforms_to_apply = id.dst_node->ipa_transforms_to_apply;
      unsigned int i;

      id.dst_node->ipa_transforms_to_apply = VEC_copy (ipa_opt_pass, heap,
                                                       id.src_node->ipa_transforms_to_apply);
      for (i = 0; i < VEC_length (ipa_opt_pass, old_transforms_to_apply); i++)
        VEC_safe_push (ipa_opt_pass, heap, id.dst_node->ipa_transforms_to_apply,
                       VEC_index (ipa_opt_pass,
                                  old_transforms_to_apply,
                                  i));
    }

  id.copy_decl = copy_decl_no_change;
  id.transform_call_graph_edges
    = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
  id.transform_new_cfg = true;
  id.transform_return_to_modify = false;
  id.transform_lang_insert_block = NULL;

  current_function_decl = new_decl;
  old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
    (DECL_STRUCT_FUNCTION (old_decl));
  initialize_cfun (new_decl, old_decl,
                   old_entry_block->count);
  DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
    = id.src_cfun->gimple_df->ipa_pta;
  push_cfun (DECL_STRUCT_FUNCTION (new_decl));

  /* Copy the function's static chain.  */
  p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
  if (p)
    DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
      copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
                         &id);

  /* If there's a tree_map, prepare for substitution.  */
  if (tree_map)
    for (i = 0; i < VEC_length (ipa_replace_map_p, tree_map); i++)
      {
        gimple init;
        replace_info = VEC_index (ipa_replace_map_p, tree_map, i);
        if (replace_info->replace_p)
          {
            tree op = replace_info->new_tree;
            if (!replace_info->old_tree)
              {
                int i = replace_info->parm_num;
                tree parm;
                for (parm = DECL_ARGUMENTS (old_decl); i; parm = DECL_CHAIN (parm))
                  i--;
                replace_info->old_tree = parm;
              }

            STRIP_NOPS (op);

            if (TREE_CODE (op) == VIEW_CONVERT_EXPR)
              op = TREE_OPERAND (op, 0);

            if (TREE_CODE (op) == ADDR_EXPR)
              {
                op = TREE_OPERAND (op, 0);
                while (handled_component_p (op))
                  op = TREE_OPERAND (op, 0);
                if (TREE_CODE (op) == VAR_DECL)
                  add_referenced_var (op);
              }
            gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
            init = setup_one_parameter (&id, replace_info->old_tree,
                                        replace_info->new_tree, id.src_fn,
                                        NULL,
                                        &vars);
            if (init)
              VEC_safe_push (gimple, heap, init_stmts, init);
          }
      }
  /* Copy the function's arguments.  */
  if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
    DECL_ARGUMENTS (new_decl) =
      copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
                                     args_to_skip, &vars);

  DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;

  declare_inline_vars (DECL_INITIAL (new_decl), vars);

  if (!VEC_empty (tree, DECL_STRUCT_FUNCTION (old_decl)->local_decls))
    /* Add local vars.  */
    add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id, false);

  if (DECL_RESULT (old_decl) == NULL_TREE)
    ;
  else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
    {
      DECL_RESULT (new_decl)
        = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
                      RESULT_DECL, NULL_TREE, void_type_node);
      DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
      cfun->returns_struct = 0;
      cfun->returns_pcc_struct = 0;
    }
  else
    {
      tree old_name;
      DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
      lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
      if (gimple_in_ssa_p (id.src_cfun)
          && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
          && (old_name
              = gimple_default_def (id.src_cfun, DECL_RESULT (old_decl))))
        {
          tree new_name = make_ssa_name (DECL_RESULT (new_decl), NULL);
          insert_decl_map (&id, old_name, new_name);
          SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
          set_default_def (DECL_RESULT (new_decl), new_name);
        }
    }

  /* Copy the function's body.  */
  copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
             ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, blocks_to_copy, new_entry);

  /* Renumber the lexical scoping (non-code) blocks consecutively.  */
  number_blocks (new_decl);

  /* We want to create the BB unconditionally, so that the addition of
     debug stmts doesn't affect BB count, which may in the end cause
     codegen differences.  */
  bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));
  while (VEC_length (gimple, init_stmts))
    insert_init_stmt (&id, bb, VEC_pop (gimple, init_stmts));
  update_clone_info (&id);

  /* Remap the nonlocal_goto_save_area, if any.  */
  if (cfun->nonlocal_goto_save_area)
    {
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      wi.info = &id;
      walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
    }

  /* Clean up.  */
  pointer_map_destroy (id.decl_map);
  if (id.debug_map)
    pointer_map_destroy (id.debug_map);
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  fold_marked_statements (0, id.statements_to_fold);
  pointer_set_destroy (id.statements_to_fold);
  fold_cond_expr_cond ();
  delete_unreachable_blocks_update_callgraph (&id);
  if (id.dst_node->analyzed)
    cgraph_rebuild_references ();
  update_ssa (TODO_update_ssa);

  /* After partial cloning we need to rescale frequencies, so they are
     within proper range in the cloned function.  */
  if (new_entry)
    {
      struct cgraph_edge *e;
      rebuild_frequencies ();

      new_version_node->count = ENTRY_BLOCK_PTR->count;
      for (e = new_version_node->callees; e; e = e->next_callee)
        {
          basic_block bb = gimple_bb (e->call_stmt);
          e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
                                                         bb);
          e->count = bb->count;
        }
      for (e = new_version_node->indirect_calls; e; e = e->next_callee)
        {
          basic_block bb = gimple_bb (e->call_stmt);
          e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
                                                         bb);
          e->count = bb->count;
        }
    }

  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  gcc_assert (!id.debug_stmts);
  VEC_free (gimple, heap, init_stmts);
  pop_cfun ();
  current_function_decl = old_current_function_decl;
  gcc_assert (!current_function_decl
              || DECL_STRUCT_FUNCTION (current_function_decl) == cfun);
  return;
}

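/* A hypothetical call sketch (not from this file): IPA cloning code
   typically creates NEW_DECL via the cgraph machinery and then invokes

       tree_function_versioning (old_decl, new_decl, tree_map,
                                 false, args_to_skip,
                                 false, NULL, NULL);

   where TREE_MAP may be NULL when no parameter is replaced by a
   constant value, and ARGS_TO_SKIP may be NULL to keep every
   parameter.  */
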
/* EXP is a CALL_EXPR present in a GENERIC expression tree.  Try to integrate
   the callee and return the inlined body on success.  */

tree
maybe_inline_call_in_expr (tree exp)
{
  tree fn = get_callee_fndecl (exp);

  /* We can only try to inline "const" functions.  */
  if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
    {
      struct pointer_map_t *decl_map = pointer_map_create ();
      call_expr_arg_iterator iter;
      copy_body_data id;
      tree param, arg, t;

      /* Remap the parameters.  */
      for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
           param;
           param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
        *pointer_map_insert (decl_map, param) = arg;

      memset (&id, 0, sizeof (id));
      id.src_fn = fn;
      id.dst_fn = current_function_decl;
      id.src_cfun = DECL_STRUCT_FUNCTION (fn);
      id.decl_map = decl_map;

      id.copy_decl = copy_decl_no_change;
      id.transform_call_graph_edges = CB_CGE_DUPLICATE;
      id.transform_new_cfg = false;
      id.transform_return_to_modify = true;
      id.transform_lang_insert_block = NULL;

      /* Make sure not to unshare trees behind the front-end's back
         since front-end specific mechanisms may rely on sharing.  */
      id.regimplify = false;
      id.do_not_unshare = true;

      /* We're not inside any EH region.  */
      id.eh_lp_nr = 0;

      t = copy_tree_body (&id);
      pointer_map_destroy (decl_map);

      /* We can only return something suitable for use in a GENERIC
         expression tree.  */
      if (TREE_CODE (t) == MODIFY_EXPR)
        return TREE_OPERAND (t, 1);
    }

  return NULL_TREE;
}

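/* For illustration (hypothetical names): if TWICE is a "const" function
   whose body is still available, say

       static int twice (int x) { return 2 * x; }

   then calling maybe_inline_call_in_expr on the CALL_EXPR for
   twice (a) can yield the GENERIC expression 2 * a, with the parameter
   X remapped to the actual argument A.  */
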
/* Duplicate a type, fields and all.  */

tree
build_duplicate_type (tree type)
{
  struct copy_body_data id;

  memset (&id, 0, sizeof (id));
  id.src_fn = current_function_decl;
  id.dst_fn = current_function_decl;
  id.src_cfun = cfun;
  id.decl_map = pointer_map_create ();
  id.debug_map = NULL;
  id.copy_decl = copy_decl_no_change;

  type = remap_type_1 (type, &id);

  pointer_map_destroy (id.decl_map);
  if (id.debug_map)
    pointer_map_destroy (id.debug_map);

  TYPE_CANONICAL (type) = type;

  return type;
}
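
/* Hypothetical usage (illustration only):

       tree dup = build_duplicate_type (type);

   gives a deep copy of TYPE, fields and all, that is its own canonical
   type, so it no longer compares TYPE_CANONICAL-equal to the
   original.  */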