OpenCores: Subversion repository openrisc_2011-10-31
URL: https://opencores.org/ocsvn/openrisc_2011-10-31/openrisc_2011-10-31/trunk
[openrisc/] [trunk/] [gnu-src/] [gcc-4.5.1/] [gcc/] [tree-inline.c], blame view for rev 328 (last changed in rev 280 by jeremybenn)
/* Tree inlining.
   Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Alexandre Oliva <aoliva@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "tree.h"
#include "tree-inline.h"
#include "rtl.h"
#include "expr.h"
#include "flags.h"
#include "params.h"
#include "input.h"
#include "insn-config.h"
#include "hashtab.h"
#include "langhooks.h"
#include "basic-block.h"
#include "tree-iterator.h"
#include "cgraph.h"
#include "intl.h"
#include "tree-mudflap.h"
#include "tree-flow.h"
#include "function.h"
#include "ggc.h"
#include "tree-flow.h"
#include "diagnostic.h"
#include "except.h"
#include "debug.h"
#include "pointer-set.h"
#include "ipa-prop.h"
#include "value-prof.h"
#include "tree-pass.h"
#include "target.h"
#include "integrate.h"

/* I'm not really happy about this, but we need to handle gimple and
   non-gimple trees.  */
#include "gimple.h"

/* Inlining, Cloning, Versioning, Parallelization

   Inlining: a function body is duplicated, but the PARM_DECLs are
   remapped into VAR_DECLs, and non-void RETURN_EXPRs become
   MODIFY_EXPRs that store to a dedicated returned-value variable.
   The duplicated eh_region info of the copy will later be appended
   to the info for the caller; the eh_region info in copied throwing
   statements and RESX statements is adjusted accordingly.

   Cloning: (only in C++) We have one body for a con/de/structor, and
   multiple function decls, each with a unique parameter list.
   Duplicate the body, using the given splay tree; some parameters
   will become constants (like 0 or 1).

   Versioning: a function body is duplicated and the result is a new
   function, rather than being copied into blocks of an existing
   function as with inlining.  Some parameters will become constants.

   Parallelization: a region of a function is duplicated resulting in
   a new function.  Variables may be replaced with complex expressions
   to enable shared variable semantics.

   All of these will simultaneously look up any callgraph edges.  If
   we're going to inline the duplicated function body, and the given
   function has some cloned callgraph nodes (one for each place this
   function will be inlined) those callgraph edges will be duplicated.
   If we're cloning the body, those callgraph edges will be
   updated to point into the new body.  (Note that the original
   callgraph node and edge list will not be altered.)

   See the CALL_EXPR handling case in copy_tree_body_r ().  */
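
/* For illustration, a minimal sketch of the inlining transform above
   (hypothetical source code; the temporary name f.result is made up,
   real temporaries get compiler-generated names): inlining

     int f (void) { return g () + 1; }
     x = f ();

   rewrites the duplicated body so that the RETURN_EXPR becomes a
   MODIFY_EXPR into a dedicated return variable,

     f.result = g () + 1;
     x = f.result;

   and the branch semantics of the return are expressed as CFG edges
   rather than as statements.  */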

/* To Do:

   o In order to make inlining-on-trees work, we pessimized
     function-local static constants.  In particular, they are now
     always output, even when not addressed.  Fix this by treating
     function-local static constants just like global static
     constants; the back-end already knows not to output them if they
     are not needed.

   o Provide heuristics to clamp inlining of recursive template
     calls?  */


/* Weights that estimate_num_insns uses for heuristics in inlining.  */

eni_weights eni_inlining_weights;

/* Weights that estimate_num_insns uses to estimate the size of the
   produced code.  */

eni_weights eni_size_weights;

/* Weights that estimate_num_insns uses to estimate the time necessary
   to execute the produced code.  */

eni_weights eni_time_weights;

/* Prototypes.  */

static tree declare_return_variable (copy_body_data *, tree, tree);
static void remap_block (tree *, copy_body_data *);
static void copy_bind_expr (tree *, int *, copy_body_data *);
static tree mark_local_for_remap_r (tree *, int *, void *);
static void unsave_expr_1 (tree);
static tree unsave_r (tree *, int *, void *);
static void declare_inline_vars (tree, tree);
static void remap_save_expr (tree *, void *, int *);
static void prepend_lexical_block (tree current_block, tree new_block);
static tree copy_decl_to_var (tree, copy_body_data *);
static tree copy_result_decl_to_var (tree, copy_body_data *);
static tree copy_decl_maybe_to_var (tree, copy_body_data *);
static gimple remap_gimple_stmt (gimple, copy_body_data *);
static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);

/* Insert a tree->tree mapping for ID.  Although the name suggests
   that the trees should be variables, it is used for more than that.  */

void
insert_decl_map (copy_body_data *id, tree key, tree value)
{
  *pointer_map_insert (id->decl_map, key) = value;

  /* Always insert an identity map as well.  If we see this same new
     node again, we won't want to duplicate it a second time.  */
  if (key != value)
    *pointer_map_insert (id->decl_map, value) = value;
}
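
/* A minimal usage sketch (hypothetical; KEY and VALUE stand for any
   trees a caller wants paired): after

     insert_decl_map (id, key, value);

   every later lookup of KEY in ID->decl_map yields VALUE, and the
   identity entry for VALUE guarantees that remapping VALUE itself is
   a no-op instead of producing a second copy.  */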

/* Insert a tree->tree mapping for ID.  This is only used for
   variables.  */

static void
insert_debug_decl_map (copy_body_data *id, tree key, tree value)
{
  if (!gimple_in_ssa_p (id->src_cfun))
    return;

  if (!MAY_HAVE_DEBUG_STMTS)
    return;

  if (!target_for_debug_bind (key))
    return;

  gcc_assert (TREE_CODE (key) == PARM_DECL);
  gcc_assert (TREE_CODE (value) == VAR_DECL);

  if (!id->debug_map)
    id->debug_map = pointer_map_create ();

  *pointer_map_insert (id->debug_map, key) = value;
}

/* If nonzero, we're remapping the contents of inlined debug
   statements.  If negative, an error has occurred, such as a
   reference to a variable that isn't available in the inlined
   context.  */
static int processing_debug_stmt = 0;

/* Construct a new SSA name for old NAME.  ID is the inline context.  */

static tree
remap_ssa_name (tree name, copy_body_data *id)
{
  tree new_tree;
  tree *n;

  gcc_assert (TREE_CODE (name) == SSA_NAME);

  n = (tree *) pointer_map_contains (id->decl_map, name);
  if (n)
    return unshare_expr (*n);

  if (processing_debug_stmt)
    {
      processing_debug_stmt = -1;
      return name;
    }

  /* Do not set DEF_STMT yet, as the statement is not copied yet.  We do
     that in copy_bb.  */
  new_tree = remap_decl (SSA_NAME_VAR (name), id);

  /* We might have substituted a constant or another SSA_NAME for
     the variable.

     Replace the SSA name representing the RESULT_DECL by the variable
     during inlining: this saves us from needing to introduce a PHI node
     in case the return value is only partly initialized.  */
  if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
      && (TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
          || !id->transform_return_to_modify))
    {
      new_tree = make_ssa_name (new_tree, NULL);
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
        = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      TREE_TYPE (new_tree) = TREE_TYPE (SSA_NAME_VAR (new_tree));
      if (gimple_nop_p (SSA_NAME_DEF_STMT (name)))
        {
          /* By inlining a function with an uninitialized variable, we might
             extend its lifetime (the variable might get reused).  This causes
             an ICE if we end up extending the lifetime of an SSA name across
             an abnormal edge, and it also increases register pressure.

             We simply initialize all uninitialized vars to 0, except
             when we are inlining into the very first BB.  We could avoid
             this for all BBs that are not inside strongly connected
             regions of the CFG, but this is expensive to test.  */
          if (id->entry_bb
              && is_gimple_reg (SSA_NAME_VAR (name))
              && TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL
              && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR, 0)->dest
                  || EDGE_COUNT (id->entry_bb->preds) != 1))
            {
              gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
              gimple init_stmt;

              init_stmt = gimple_build_assign (new_tree,
                                               fold_convert (TREE_TYPE (new_tree),
                                                             integer_zero_node));
              gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
              SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
            }
          else
            {
              SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
              if (gimple_default_def (id->src_cfun, SSA_NAME_VAR (name))
                  == name)
                set_default_def (SSA_NAME_VAR (new_tree), new_tree);
            }
        }
    }
  else
    insert_decl_map (id, name, new_tree);
  return new_tree;
}
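
/* A sketch of what the zero-initialization above inserts (hypothetical
   GIMPLE; the name uninit_var_3 is made up): for an uninitialized
   source-function variable whose lifetime would otherwise be extended
   by inlining, the copy gets an explicit definition in the entry block,

     uninit_var_3 = 0;

   so that no SSA name ends up live across an abnormal edge without a
   defining statement.  */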

/* Remap DECL during the copying of the BLOCK tree for the function.  */

tree
remap_decl (tree decl, copy_body_data *id)
{
  tree *n;

  /* We only remap local variables in the current function.  */

  /* See if we have remapped this declaration.  */

  n = (tree *) pointer_map_contains (id->decl_map, decl);

  if (!n && processing_debug_stmt)
    {
      processing_debug_stmt = -1;
      return decl;
    }

  /* If we didn't already have an equivalent for this declaration,
     create one now.  */
  if (!n)
    {
      /* Make a copy of the variable or label.  */
      tree t = id->copy_decl (decl, id);

      /* Remember it, so that if we encounter this local entity again
         we can reuse this copy.  Do this early because remap_type may
         need this decl for TYPE_STUB_DECL.  */
      insert_decl_map (id, decl, t);

      if (!DECL_P (t))
        return t;

      /* Remap types, if necessary.  */
      TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
      if (TREE_CODE (t) == TYPE_DECL)
        DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);

      /* Remap sizes as necessary.  */
      walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
      walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);

      /* If fields, do likewise for offset and qualifier.  */
      if (TREE_CODE (t) == FIELD_DECL)
        {
          walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
          if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
            walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
        }

      if (cfun && gimple_in_ssa_p (cfun)
          && (TREE_CODE (t) == VAR_DECL
              || TREE_CODE (t) == RESULT_DECL || TREE_CODE (t) == PARM_DECL))
        {
          get_var_ann (t);
          add_referenced_var (t);
        }
      return t;
    }

  if (id->do_not_unshare)
    return *n;
  else
    return unshare_expr (*n);
}

static tree
remap_type_1 (tree type, copy_body_data *id)
{
  tree new_tree, t;

  /* We do need a copy.  Build and register it now.  If this is a pointer or
     reference type, remap the designated type and make a new pointer or
     reference type.  */
  if (TREE_CODE (type) == POINTER_TYPE)
    {
      new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
                                         TYPE_MODE (type),
                                         TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
        new_tree = build_type_attribute_qual_variant (new_tree,
                                                      TYPE_ATTRIBUTES (type),
                                                      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
                                            TYPE_MODE (type),
                                            TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
        new_tree = build_type_attribute_qual_variant (new_tree,
                                                      TYPE_ATTRIBUTES (type),
                                                      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else
    new_tree = copy_node (type);

  insert_decl_map (id, type, new_tree);

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (type);
  if (type != t)
    {
      t = remap_type (t, id);
      TYPE_MAIN_VARIANT (new_tree) = t;
      TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new_tree;
    }
  else
    {
      TYPE_MAIN_VARIANT (new_tree) = new_tree;
      TYPE_NEXT_VARIANT (new_tree) = NULL;
    }

  if (TYPE_STUB_DECL (type))
    TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);

  /* Lazily create pointer and reference types.  */
  TYPE_POINTER_TO (new_tree) = NULL;
  TYPE_REFERENCE_TO (new_tree) = NULL;

  switch (TREE_CODE (new_tree))
    {
    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      t = TYPE_MIN_VALUE (new_tree);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);

      t = TYPE_MAX_VALUE (new_tree);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case FUNCTION_TYPE:
      TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case ARRAY_TYPE:
      TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
        tree f, nf = NULL;

        for (f = TYPE_FIELDS (new_tree); f ; f = TREE_CHAIN (f))
          {
            t = remap_decl (f, id);
            DECL_CONTEXT (t) = new_tree;
            TREE_CHAIN (t) = nf;
            nf = t;
          }
        TYPE_FIELDS (new_tree) = nreverse (nf);
      }
      break;

    case OFFSET_TYPE:
    default:
      /* Shouldn't have been thought variable sized.  */
      gcc_unreachable ();
    }

  walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
  walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);

  return new_tree;
}

tree
remap_type (tree type, copy_body_data *id)
{
  tree *node;
  tree tmp;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = (tree *) pointer_map_contains (id->decl_map, type);
  if (node)
    return *node;

  /* The type only needs remapping if it's variably modified.  */
  if (! variably_modified_type_p (type, id->src_fn))
    {
      insert_decl_map (id, type, type);
      return type;
    }

  id->remapping_type_depth++;
  tmp = remap_type_1 (type, id);
  id->remapping_type_depth--;

  return tmp;
}
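
/* A worked example of when remapping actually triggers (a sketch; the
   function and variable names are hypothetical): in

     void f (int n) { int a[n]; ... }

   the type of a is variably modified, since its size depends on the
   PARM_DECL n.  When f's body is duplicated, remap_type must build a
   variant of int[n] whose size expressions refer to the remapped copy
   of n; an ordinary fixed-size type such as int[10] is simply entered
   into the map as its own identity.  */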

/* Return the previously remapped type of TYPE in ID.  Return NULL if TYPE
   is NULL or TYPE has not been remapped before.  */

static tree
remapped_type (tree type, copy_body_data *id)
{
  tree *node;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = (tree *) pointer_map_contains (id->decl_map, type);
  if (node)
    return *node;
  else
    return NULL;
}

/* Decide if DECL can be put into BLOCK_NONLOCAL_VARs.  */

static bool
can_be_nonlocal (tree decl, copy_body_data *id)
{
  /* We cannot duplicate function decls.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    return true;

  /* Local static vars must be non-local or we get multiple declaration
     problems.  */
  if (TREE_CODE (decl) == VAR_DECL
      && !auto_var_in_fn_p (decl, id->src_fn))
    return true;

  /* At the moment dwarf2out can handle only these types of nodes.  We
     can support more later.  */
  if (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != PARM_DECL)
    return false;

  /* We must use the global type.  We call remapped_type instead of
     remap_type since we don't want to remap this type here if it
     hasn't been remapped before.  */
  if (TREE_TYPE (decl) != remapped_type (TREE_TYPE (decl), id))
    return false;

  /* Without SSA we can't tell if a variable is used.  */
  if (!gimple_in_ssa_p (cfun))
    return false;

  /* Live variables must be copied so we can attach DECL_RTL.  */
  if (var_ann (decl))
    return false;

  return true;
}
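
/* For example (a sketch; g and its static are hypothetical), when
   duplicating

     int g (void) { static int counter; return ++counter; }

   the static counter fails auto_var_in_fn_p and is therefore kept
   non-local: every inlined copy of g must refer to the one original
   variable rather than receive a fresh duplicate.  */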

static tree
remap_decls (tree decls, VEC(tree,gc) **nonlocalized_list, copy_body_data *id)
{
  tree old_var;
  tree new_decls = NULL_TREE;

  /* Remap its variables.  */
  for (old_var = decls; old_var; old_var = TREE_CHAIN (old_var))
    {
      tree new_var;

      if (can_be_nonlocal (old_var, id))
        {
          if (TREE_CODE (old_var) == VAR_DECL
              && ! DECL_EXTERNAL (old_var)
              && (var_ann (old_var) || !gimple_in_ssa_p (cfun)))
            cfun->local_decls = tree_cons (NULL_TREE, old_var,
                                                   cfun->local_decls);
          if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
              && !DECL_IGNORED_P (old_var)
              && nonlocalized_list)
            VEC_safe_push (tree, gc, *nonlocalized_list, old_var);
          continue;
        }

      /* Remap the variable.  */
      new_var = remap_decl (old_var, id);

      /* If we didn't remap this variable, we can't mess with its
         TREE_CHAIN.  If we remapped this variable to the return slot, it's
         already declared somewhere else, so don't declare it here.  */

      if (new_var == id->retvar)
        ;
      else if (!new_var)
        {
          if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
              && !DECL_IGNORED_P (old_var)
              && nonlocalized_list)
            VEC_safe_push (tree, gc, *nonlocalized_list, old_var);
        }
      else
        {
          gcc_assert (DECL_P (new_var));
          TREE_CHAIN (new_var) = new_decls;
          new_decls = new_var;
        }
    }

  return nreverse (new_decls);
}

/* Copy the BLOCK to contain remapped versions of the variables
   therein.  And hook the new block into the block-tree.  */

static void
remap_block (tree *block, copy_body_data *id)
{
  tree old_block;
  tree new_block;

  /* Make the new block.  */
  old_block = *block;
  new_block = make_node (BLOCK);
  TREE_USED (new_block) = TREE_USED (old_block);
  BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
  BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
  BLOCK_NONLOCALIZED_VARS (new_block)
    = VEC_copy (tree, gc, BLOCK_NONLOCALIZED_VARS (old_block));
  *block = new_block;

  /* Remap its variables.  */
  BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
                                        &BLOCK_NONLOCALIZED_VARS (new_block),
                                        id);

  if (id->transform_lang_insert_block)
    id->transform_lang_insert_block (new_block);

  /* Remember the remapped block.  */
  insert_decl_map (id, old_block, new_block);
}

/* Copy the whole block tree and root it in id->block.  */
static tree
remap_blocks (tree block, copy_body_data *id)
{
  tree t;
  tree new_tree = block;

  if (!block)
    return NULL;

  remap_block (&new_tree, id);
  gcc_assert (new_tree != block);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    prepend_lexical_block (new_tree, remap_blocks (t, id));
  /* Blocks are in arbitrary order, but make things slightly prettier and do
     not swap order when producing a copy.  */
  BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
  return new_tree;
}

static void
copy_statement_list (tree *tp)
{
  tree_stmt_iterator oi, ni;
  tree new_tree;

  new_tree = alloc_stmt_list ();
  ni = tsi_start (new_tree);
  oi = tsi_start (*tp);
  TREE_TYPE (new_tree) = TREE_TYPE (*tp);
  *tp = new_tree;

  for (; !tsi_end_p (oi); tsi_next (&oi))
    {
      tree stmt = tsi_stmt (oi);
      if (TREE_CODE (stmt) == STATEMENT_LIST)
        copy_statement_list (&stmt);
      tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
    }
}

static void
copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
{
  tree block = BIND_EXPR_BLOCK (*tp);
  tree t;
  /* Copy (and replace) the statement.  */
  copy_tree_r (tp, walk_subtrees, NULL);
  if (block)
    {
      remap_block (&block, id);
      BIND_EXPR_BLOCK (*tp) = block;
    }

  if (BIND_EXPR_VARS (*tp))
    {
      /* This will remap a lot of the same decls again, but this should be
         harmless.  */
      BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);

      /* Also copy value-expressions.  */
      for (t = BIND_EXPR_VARS (*tp); t; t = TREE_CHAIN (t))
        if (TREE_CODE (t) == VAR_DECL
            && DECL_HAS_VALUE_EXPR_P (t))
          {
            tree tem = DECL_VALUE_EXPR (t);
            walk_tree (&tem, copy_tree_body_r, id, NULL);
            SET_DECL_VALUE_EXPR (t, tem);
          }
    }
}


/* Create a new gimple_seq by remapping all the statements in BODY
   using the inlining information in ID.  */

gimple_seq
remap_gimple_seq (gimple_seq body, copy_body_data *id)
{
  gimple_stmt_iterator si;
  gimple_seq new_body = NULL;

  for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
    {
      gimple new_stmt = remap_gimple_stmt (gsi_stmt (si), id);
      gimple_seq_add_stmt (&new_body, new_stmt);
    }

  return new_body;
}


/* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
   block using the mapping information in ID.  */

static gimple
copy_gimple_bind (gimple stmt, copy_body_data *id)
{
  gimple new_bind;
  tree new_block, new_vars;
  gimple_seq body, new_body;

  /* Copy the statement.  Note that we purposely don't use copy_stmt
     here because we need to remap statements as we copy.  */
  body = gimple_bind_body (stmt);
  new_body = remap_gimple_seq (body, id);

  new_block = gimple_bind_block (stmt);
  if (new_block)
    remap_block (&new_block, id);

  /* This will remap a lot of the same decls again, but this should be
     harmless.  */
  new_vars = gimple_bind_vars (stmt);
  if (new_vars)
    new_vars = remap_decls (new_vars, NULL, id);

  new_bind = gimple_build_bind (new_vars, new_body, new_block);

  return new_bind;
}


/* Remap the GIMPLE operand pointed to by *TP.  DATA is really a
   'struct walk_stmt_info *'.  DATA->INFO is a 'copy_body_data *'.
   WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
   recursing into the children nodes of *TP.  */

static tree
remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
  copy_body_data *id = (copy_body_data *) wi_p->info;
  tree fn = id->src_fn;

  if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }
  else if (auto_var_in_fn_p (*tp, fn))
    {
      /* Local variables and labels need to be replaced by equivalent
         variables.  We don't want to copy static variables; there's
         only one of those, no matter how many times we inline the
         containing function.  Similarly for globals from an outer
         function.  */
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      /* ???  The C++ frontend uses void * pointer zero to initialize
         any other type.  This confuses the middle-end type verification.
         As cloned bodies do not go through gimplification again the fixup
         there doesn't trigger.  */
      if (TREE_CODE (new_decl) == INTEGER_CST
          && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
        new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == LABEL_DECL
           && (!DECL_CONTEXT (*tp)
               || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TYPE_P (*tp))
    /* Types may need remapping as well.  */
    *tp = remap_type (*tp, id);
  else if (CONSTANT_CLASS_P (*tp))
    {
      /* If this is a constant, we have to copy the node iff the type
         will be remapped.  copy_tree_r will not copy a constant.  */
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
        *walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
        *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
                                  TREE_INT_CST_HIGH (*tp));
      else
        {
          *tp = copy_node (*tp);
          TREE_TYPE (*tp) = new_type;
        }
    }
  else
    {
      /* Otherwise, just copy the node.  Note that copy_tree_r already
         knows not to copy VAR_DECLs, etc., so this is safe.  */
      if (TREE_CODE (*tp) == INDIRECT_REF)
        {
          /* Get rid of *& from inline substitutions that can happen when a
             pointer argument is an ADDR_EXPR.  */
          tree decl = TREE_OPERAND (*tp, 0);
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              tree type, new_tree, old;

              /* If we happen to get an ADDR_EXPR in n->value, strip
                 it manually here as we'll eventually get ADDR_EXPRs
                 which lie about their types pointed to.  In this case
                 build_fold_indirect_ref wouldn't strip the
                 INDIRECT_REF, but we absolutely rely on that.  As
                 fold_indirect_ref does other useful transformations,
                 try that first, though.  */
              type = TREE_TYPE (TREE_TYPE (*n));
              new_tree = unshare_expr (*n);
              old = *tp;
              *tp = gimple_fold_indirect_ref (new_tree);
              if (!*tp)
                {
                  if (TREE_CODE (new_tree) == ADDR_EXPR)
                    {
                      *tp = fold_indirect_ref_1 (EXPR_LOCATION (new_tree),
                                                 type, new_tree);
                      /* ???  We should either assert here or build
                         a VIEW_CONVERT_EXPR instead of blindly leaking
                         incompatible types to our IL.  */
                      if (! *tp)
                        *tp = TREE_OPERAND (new_tree, 0);
                    }
                  else
                    {
                      *tp = build1 (INDIRECT_REF, type, new_tree);
                      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
                      TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
                    }
                }
              *walk_subtrees = 0;
              return NULL;
            }
        }

      /* Here is the "usual case".  Copy this tree node, and then
         tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      /* Global variables we haven't seen yet need to go into referenced
         vars, unless they are referenced only from types.  */
      if (gimple_in_ssa_p (cfun)
          && TREE_CODE (*tp) == VAR_DECL
          && id->remapping_type_depth == 0
          && !processing_debug_stmt)
        add_referenced_var (*tp);

      /* We should never have TREE_BLOCK set on non-statements.  */
      if (EXPR_P (*tp))
        gcc_assert (!TREE_BLOCK (*tp));

      if (TREE_CODE (*tp) != OMP_CLAUSE)
        TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
        {
          /* The copied TARGET_EXPR has never been expanded, even if the
             original node was expanded already.  */
          TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
          TREE_OPERAND (*tp, 3) = NULL_TREE;
        }
      else if (TREE_CODE (*tp) == ADDR_EXPR)
        {
          /* Variable substitution need not be simple.  In particular,
             the INDIRECT_REF substitution above.  Make sure that
             TREE_CONSTANT and friends are up-to-date.  But make sure
             to not improperly set TREE_BLOCK on some sub-expressions.  */
          int invariant = is_gimple_min_invariant (*tp);
          tree block = id->block;
          id->block = NULL_TREE;
          walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
          id->block = block;

          /* Handle the case where we substituted an INDIRECT_REF
             into the operand of the ADDR_EXPR.  */
          if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
            *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
          else
            recompute_tree_invariant_for_addr_expr (*tp);

          /* If this used to be invariant, but is not any longer,
             then regimplification is probably needed.  */
          if (invariant && !is_gimple_min_invariant (*tp))
            id->regimplify = true;

          *walk_subtrees = 0;
        }
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
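
/* The INDIRECT_REF case above is what folds away *& chains.  A sketch
   (hypothetical caller and callee): inlining

     void f (int *p) { *p = 1; }
     f (&x);

   maps p to &x, so the copied body momentarily reads *(&x) = 1; the
   gimple_fold_indirect_ref / fold_indirect_ref_1 calls collapse that
   back to the plain

     x = 1;  */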


/* Called from copy_body_id via walk_tree.  DATA is really a
   `copy_body_data *'.  */

tree
copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
{
  copy_body_data *id = (copy_body_data *) data;
  tree fn = id->src_fn;
  tree new_block;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, RETURN_EXPRs should be transformed to just the
     contained MODIFY_EXPR.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than a statement.  */
  if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
    {
      tree assignment = TREE_OPERAND (*tp, 0);

      /* If we're returning something, just turn that into an
         assignment into the equivalent of the original RESULT_DECL.
         If the "assignment" is just the result decl, the result
         decl has already been set (e.g. a recent "foo (&result_decl,
         ...)"); just toss the entire RETURN_EXPR.  */
      if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
        {
          /* Replace the RETURN_EXPR with (a copy of) the
             MODIFY_EXPR hanging underneath.  */
          *tp = copy_node (assignment);
        }
      else /* Else the RETURN_EXPR returns no value.  */
        {
          *tp = NULL;
          return (tree) (void *)1;
        }
    }
  else if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Local variables and labels need to be replaced by equivalent
     variables.  We don't want to copy static variables; there's only
     one of those, no matter how many times we inline the containing
     function.  Similarly for globals from an outer function.  */
  else if (auto_var_in_fn_p (*tp, fn))
    {
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    copy_statement_list (tp);
  else if (TREE_CODE (*tp) == SAVE_EXPR
           || TREE_CODE (*tp) == TARGET_EXPR)
    remap_save_expr (tp, id->decl_map, walk_subtrees);
  else if (TREE_CODE (*tp) == LABEL_DECL
           && (! DECL_CONTEXT (*tp)
               || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  /* Types may need remapping as well.  */
  else if (TYPE_P (*tp))
    *tp = remap_type (*tp, id);

  /* If this is a constant, we have to copy the node iff the type will be
     remapped.  copy_tree_r will not copy a constant.  */
  else if (CONSTANT_CLASS_P (*tp))
    {
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
        *walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
        *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
                                  TREE_INT_CST_HIGH (*tp));
      else
        {
          *tp = copy_node (*tp);
          TREE_TYPE (*tp) = new_type;
        }
    }

  /* Otherwise, just copy the node.  Note that copy_tree_r already
     knows not to copy VAR_DECLs, etc., so this is safe.  */
  else
    {
      /* Here we handle trees that are not completely rewritten.
         First we detect some inlining-induced bogosities for
         discarding.  */
      if (TREE_CODE (*tp) == MODIFY_EXPR
          && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
          && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
        {
          /* Some assignments VAR = VAR; don't generate any rtl code
             and thus don't count as variable modification.  Avoid
             keeping bogosities like 0 = 0.  */
          tree decl = TREE_OPERAND (*tp, 0), value;
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              value = *n;
              STRIP_TYPE_NOPS (value);
              if (TREE_CONSTANT (value) || TREE_READONLY (value))
                {
                  *tp = build_empty_stmt (EXPR_LOCATION (*tp));
                  return copy_tree_body_r (tp, walk_subtrees, data);
                }
            }
        }
      else if (TREE_CODE (*tp) == INDIRECT_REF)
        {
          /* Get rid of *& from inline substitutions that can happen when a
             pointer argument is an ADDR_EXPR.  */
          tree decl = TREE_OPERAND (*tp, 0);
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              tree new_tree;
              tree old;
              /* If we happen to get an ADDR_EXPR in n->value, strip
                 it manually here as we'll eventually get ADDR_EXPRs
                 which lie about their types pointed to.  In this case
                 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
                 but we absolutely rely on that.  As fold_indirect_ref
                 does other useful transformations, try that first, though.  */
              tree type = TREE_TYPE (TREE_TYPE (*n));
              if (id->do_not_unshare)
                new_tree = *n;
              else
                new_tree = unshare_expr (*n);
              old = *tp;
              *tp = gimple_fold_indirect_ref (new_tree);
              if (! *tp)
                {
                  if (TREE_CODE (new_tree) == ADDR_EXPR)
                    {
                      *tp = fold_indirect_ref_1 (EXPR_LOCATION (new_tree),
                                                 type, new_tree);
                      /* ???  We should either assert here or build
                         a VIEW_CONVERT_EXPR instead of blindly leaking
                         incompatible types to our IL.  */
                      if (! *tp)
                        *tp = TREE_OPERAND (new_tree, 0);
                    }
                  else
                    {
                      *tp = build1 (INDIRECT_REF, type, new_tree);
                      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
                      TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
                    }
                }
              *walk_subtrees = 0;
              return NULL;
            }
        }

      /* Here is the "usual case".  Copy this tree node, and then
         tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      /* Global variables we haven't seen yet need to go into referenced
         vars, unless they are referenced only from types or debug stmts.  */
      if (gimple_in_ssa_p (cfun)
          && TREE_CODE (*tp) == VAR_DECL
          && id->remapping_type_depth == 0
          && !processing_debug_stmt)
        add_referenced_var (*tp);

      /* If EXPR has a block defined, map it to the newly constructed block.
         When inlining we want EXPRs without a block to appear in the block
         of the function call, if we are not remapping a type.  */
      if (EXPR_P (*tp))
        {
          new_block = id->remapping_type_depth == 0 ? id->block : NULL;
          if (TREE_BLOCK (*tp))
            {
              tree *n;
              n = (tree *) pointer_map_contains (id->decl_map,
                                                 TREE_BLOCK (*tp));
              gcc_assert (n);
              new_block = *n;
            }
          TREE_BLOCK (*tp) = new_block;
        }

      if (TREE_CODE (*tp) != OMP_CLAUSE)
        TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      /* The copied TARGET_EXPR has never been expanded, even if the
         original node was expanded already.  */
      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
        {
          TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
          TREE_OPERAND (*tp, 3) = NULL_TREE;
        }

      /* Variable substitution need not be simple.  In particular, the
         INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
         and friends are up-to-date.  */
      else if (TREE_CODE (*tp) == ADDR_EXPR)
        {
          int invariant = is_gimple_min_invariant (*tp);
          walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);

          /* Handle the case where we substituted an INDIRECT_REF
             into the operand of the ADDR_EXPR.  */
          if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
            *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
          else
            recompute_tree_invariant_for_addr_expr (*tp);

          /* If this used to be invariant, but is not any longer,
             then regimplification is probably needed.  */
          if (invariant && !is_gimple_min_invariant (*tp))
            id->regimplify = true;

          *walk_subtrees = 0;
        }
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
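
/* An example of the bogosity check above (a sketch with hypothetical
   names): if cloning maps the parameter n to the constant 0, the
   copied no-op assignment

     n = n;

   would otherwise become the nonsensical 0 = 0; detecting that the
   mapped value is constant lets us drop the statement and emit an
   empty statement in its place.  */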

/* Helper for remap_gimple_stmt.  Given an EH region number for the
   source function, map that to the duplicate EH region number in
   the destination function.  */

static int
remap_eh_region_nr (int old_nr, copy_body_data *id)
{
  eh_region old_r, new_r;
  void **slot;

  old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
  slot = pointer_map_contains (id->eh_map, old_r);
  new_r = (eh_region) *slot;

  return new_r->index;
}

/* Similar, but operate on INTEGER_CSTs.  */

static tree
remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
{
  int old_nr, new_nr;

  old_nr = tree_low_cst (old_t_nr, 0);
  new_nr = remap_eh_region_nr (old_nr, id);

  return build_int_cst (NULL, new_nr);
}
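
/* These two helpers exist because region numbers appear in statements
   in two shapes; a sketch of their use (the region numbers here are
   hypothetical): a copied __builtin_eh_pointer (3) carries its region
   as an INTEGER_CST argument and goes through remap_eh_region_tree_nr,
   while a copied GIMPLE_RESX stores a plain int and goes through
   remap_eh_region_nr, so after duplication both refer to, say, region
   7 of the destination function instead of region 3 of the source.  */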
1187
 
1188
/* Helper for copy_bb.  Remap statement STMT using the inlining
1189
   information in ID.  Return the new statement copy.  */
1190
 
1191
static gimple
1192
remap_gimple_stmt (gimple stmt, copy_body_data *id)
1193
{
1194
  gimple copy = NULL;
1195
  struct walk_stmt_info wi;
1196
  tree new_block;
1197
  bool skip_first = false;
1198
 
1199
  /* Begin by recognizing trees that we'll completely rewrite for the
1200
     inlining context.  Our output for these trees is completely
1201
     different from out input (e.g. RETURN_EXPR is deleted, and morphs
1202
     into an edge).  Further down, we'll handle trees that get
1203
     duplicated and/or tweaked.  */
1204
 
1205
  /* When requested, GIMPLE_RETURNs should be transformed to just the
1206
     contained GIMPLE_ASSIGN.  The branch semantics of the return will
1207
     be handled elsewhere by manipulating the CFG rather than the
1208
     statement.  */
1209
  if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1210
    {
1211
      tree retval = gimple_return_retval (stmt);
1212
 
1213
      /* If we're returning something, just turn that into an
1214
         assignment into the equivalent of the original RESULT_DECL.
1215
         If RETVAL is just the result decl, the result decl has
1216
         already been set (e.g. a recent "foo (&result_decl, ...)");
1217
         just toss the entire GIMPLE_RETURN.  */
1218
      if (retval && TREE_CODE (retval) != RESULT_DECL)
1219
        {
1220
          copy = gimple_build_assign (id->retvar, retval);
1221
          /* id->retvar is already substituted.  Skip it on later remapping.  */
1222
          skip_first = true;
1223
        }
1224
      else
1225
        return gimple_build_nop ();
1226
    }
1227
  else if (gimple_has_substatements (stmt))
1228
    {
1229
      gimple_seq s1, s2;
1230
 
1231
      /* When cloning bodies from the C++ front end, we will be handed bodies
1232
         in High GIMPLE form.  Handle here all the High GIMPLE statements that
1233
         have embedded statements.  */
1234
      switch (gimple_code (stmt))
1235
        {
1236
        case GIMPLE_BIND:
1237
          copy = copy_gimple_bind (stmt, id);
1238
          break;
1239
 
1240
        case GIMPLE_CATCH:
1241
          s1 = remap_gimple_seq (gimple_catch_handler (stmt), id);
1242
          copy = gimple_build_catch (gimple_catch_types (stmt), s1);
1243
          break;
1244
 
1245
        case GIMPLE_EH_FILTER:
1246
          s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1247
          copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1248
          break;
1249
 
1250
        case GIMPLE_TRY:
1251
          s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1252
          s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1253
          copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1254
          break;
1255
 
1256
        case GIMPLE_WITH_CLEANUP_EXPR:
1257
          s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1258
          copy = gimple_build_wce (s1);
1259
          break;
1260
 
1261
        case GIMPLE_OMP_PARALLEL:
1262
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1263
          copy = gimple_build_omp_parallel
1264
                   (s1,
1265
                    gimple_omp_parallel_clauses (stmt),
1266
                    gimple_omp_parallel_child_fn (stmt),
1267
                    gimple_omp_parallel_data_arg (stmt));
1268
          break;
1269
 
1270
        case GIMPLE_OMP_TASK:
1271
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1272
          copy = gimple_build_omp_task
1273
                   (s1,
1274
                    gimple_omp_task_clauses (stmt),
1275
                    gimple_omp_task_child_fn (stmt),
1276
                    gimple_omp_task_data_arg (stmt),
1277
                    gimple_omp_task_copy_fn (stmt),
1278
                    gimple_omp_task_arg_size (stmt),
1279
                    gimple_omp_task_arg_align (stmt));
1280
          break;
1281
 
1282
        case GIMPLE_OMP_FOR:
1283
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1284
          s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1285
          copy = gimple_build_omp_for (s1, gimple_omp_for_clauses (stmt),
1286
                                       gimple_omp_for_collapse (stmt), s2);
1287
          {
1288
            size_t i;
1289
            for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1290
              {
1291
                gimple_omp_for_set_index (copy, i,
1292
                                          gimple_omp_for_index (stmt, i));
1293
                gimple_omp_for_set_initial (copy, i,
1294
                                            gimple_omp_for_initial (stmt, i));
1295
                gimple_omp_for_set_final (copy, i,
1296
                                          gimple_omp_for_final (stmt, i));
1297
                gimple_omp_for_set_incr (copy, i,
1298
                                         gimple_omp_for_incr (stmt, i));
1299
                gimple_omp_for_set_cond (copy, i,
1300
                                         gimple_omp_for_cond (stmt, i));
1301
              }
1302
          }
1303
          break;
1304
 
1305
        case GIMPLE_OMP_MASTER:
1306
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1307
          copy = gimple_build_omp_master (s1);
1308
          break;
1309
 
1310
        case GIMPLE_OMP_ORDERED:
1311
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1312
          copy = gimple_build_omp_ordered (s1);
1313
          break;
1314
 
1315
        case GIMPLE_OMP_SECTION:
1316
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1317
          copy = gimple_build_omp_section (s1);
1318
          break;
1319
 
1320
        case GIMPLE_OMP_SECTIONS:
1321
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1322
          copy = gimple_build_omp_sections
1323
                   (s1, gimple_omp_sections_clauses (stmt));
1324
          break;
1325
 
1326
        case GIMPLE_OMP_SINGLE:
1327
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1328
          copy = gimple_build_omp_single
1329
                   (s1, gimple_omp_single_clauses (stmt));
1330
          break;
1331
 
1332
        case GIMPLE_OMP_CRITICAL:
1333
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1334
          copy
1335
            = gimple_build_omp_critical (s1, gimple_omp_critical_name (stmt));
1336
          break;
1337
 
1338
        default:
1339
          gcc_unreachable ();
1340
        }
1341
    }
1342
  else
1343
    {
1344
      if (gimple_assign_copy_p (stmt)
1345
          && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1346
          && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1347
        {
1348
          /* Here we handle statements that are not completely rewritten.
1349
             First we detect some inlining-induced bogosities for
1350
             discarding.  */
1351
 
1352
          /* Some assignments VAR = VAR; don't generate any rtl code
1353
             and thus don't count as variable modification.  Avoid
1354
             keeping bogosities like 0 = 0.  */
1355
          tree decl = gimple_assign_lhs (stmt), value;
1356
          tree *n;
1357
 
1358
          n = (tree *) pointer_map_contains (id->decl_map, decl);
1359
          if (n)
1360
            {
1361
              value = *n;
1362
              STRIP_TYPE_NOPS (value);
1363
              if (TREE_CONSTANT (value) || TREE_READONLY (value))
1364
                return gimple_build_nop ();
1365
            }
1366
        }
1367
 
1368
      if (gimple_debug_bind_p (stmt))
1369
        {
1370
          copy = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1371
                                          gimple_debug_bind_get_value (stmt),
1372
                                          stmt);
1373
          VEC_safe_push (gimple, heap, id->debug_stmts, copy);
1374
          return copy;
1375
        }
1376
 
1377
      /* Create a new deep copy of the statement.  */
1378
      copy = gimple_copy (stmt);
1379
 
1380
      /* Remap the region numbers for __builtin_eh_{pointer,filter},
1381
         RESX and EH_DISPATCH.  */
1382
      if (id->eh_map)
1383
        switch (gimple_code (copy))
1384
          {
1385
          case GIMPLE_CALL:
1386
            {
1387
              tree r, fndecl = gimple_call_fndecl (copy);
1388
              if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
1389
                switch (DECL_FUNCTION_CODE (fndecl))
1390
                  {
1391
                  case BUILT_IN_EH_COPY_VALUES:
1392
                    r = gimple_call_arg (copy, 1);
1393
                    r = remap_eh_region_tree_nr (r, id);
1394
                    gimple_call_set_arg (copy, 1, r);
1395
                    /* FALLTHRU */
1396
 
1397
                  case BUILT_IN_EH_POINTER:
1398
                  case BUILT_IN_EH_FILTER:
1399
                    r = gimple_call_arg (copy, 0);
1400
                    r = remap_eh_region_tree_nr (r, id);
1401
                    gimple_call_set_arg (copy, 0, r);
1402
                    break;
1403
 
1404
                  default:
1405
                    break;
1406
                  }
1407
            }
1408
            break;
1409
 
1410
          case GIMPLE_RESX:
1411
            {
1412
              int r = gimple_resx_region (copy);
1413
              r = remap_eh_region_nr (r, id);
1414
              gimple_resx_set_region (copy, r);
1415
            }
1416
            break;
1417
 
1418
          case GIMPLE_EH_DISPATCH:
1419
            {
1420
              int r = gimple_eh_dispatch_region (copy);
1421
              r = remap_eh_region_nr (r, id);
1422
              gimple_eh_dispatch_set_region (copy, r);
1423
            }
1424
            break;
1425
 
1426
          default:
1427
            break;
1428
          }
1429
    }
1430
 
1431
  /* If STMT has a block defined, map it to the newly constructed
1432
     block.  When inlining we want statements without a block to
1433
     appear in the block of the function call.  */
1434
  new_block = id->block;
1435
  if (gimple_block (copy))
1436
    {
1437
      tree *n;
1438
      n = (tree *) pointer_map_contains (id->decl_map, gimple_block (copy));
1439
      gcc_assert (n);
1440
      new_block = *n;
1441
    }
1442
 
1443
  gimple_set_block (copy, new_block);
1444
 
1445
  if (gimple_debug_bind_p (copy))
1446
    return copy;
1447
 
1448
  /* Remap all the operands in COPY.  */
1449
  memset (&wi, 0, sizeof (wi));
1450
  wi.info = id;
1451
  if (skip_first)
1452
    walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1453
  else
1454
    walk_gimple_op (copy, remap_gimple_op_r, &wi);
1455
 
1456
  /* Clear the copied virtual operands.  We are not remapping them here
1457
     but are going to recreate them from scratch.  */
1458
  if (gimple_has_mem_ops (copy))
1459
    {
1460
      gimple_set_vdef (copy, NULL_TREE);
1461
      gimple_set_vuse (copy, NULL_TREE);
1462
    }
1463
 
1464
  return copy;
1465
}
1466
 
1467
 
1468
/* Copy basic block, scale profile accordingly.  Edges will be taken care of
1469
   later  */

static basic_block
copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
         gcov_type count_scale)
{
  gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
  basic_block copy_basic_block;
  tree decl;
  gcov_type freq;

  /* create_basic_block() will append every new block to
     basic_block_info automatically.  */
  copy_basic_block = create_basic_block (NULL, (void *) 0,
                                         (basic_block) bb->prev_bb->aux);
  copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE;

  /* We are going to rebuild frequencies from scratch.  These values
     are of only minor importance for driving canonicalize_loop_headers.  */
  freq = ((gcov_type)bb->frequency * frequency_scale / REG_BR_PROB_BASE);

  /* We recompute frequencies after inlining, so this is quite safe.  */
  if (freq > BB_FREQ_MAX)
    freq = BB_FREQ_MAX;
  copy_basic_block->frequency = freq;
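
  /* For instance, with REG_BR_PROB_BASE == 10000, inlining a call site
     executed half as often as the callee's entry block gives
     count_scale == 5000, so a block with count 80 in the callee gets
     count 80 * 5000 / 10000 == 40 in the copy.  */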

  copy_gsi = gsi_start_bb (copy_basic_block);

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      gimple orig_stmt = stmt;

      id->regimplify = false;
      stmt = remap_gimple_stmt (stmt, id);
      if (gimple_nop_p (stmt))
        continue;

      gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt);
      seq_gsi = copy_gsi;

      /* With return slot optimization we can end up with
         non-gimple (foo *)&this->m, fix that here.  */
      if (is_gimple_assign (stmt)
          && gimple_assign_rhs_code (stmt) == NOP_EXPR
          && !is_gimple_val (gimple_assign_rhs1 (stmt)))
        {
          tree new_rhs;
          new_rhs = force_gimple_operand_gsi (&seq_gsi,
                                              gimple_assign_rhs1 (stmt),
                                              true, NULL, false, GSI_NEW_STMT);
          gimple_assign_set_rhs1 (stmt, new_rhs);
          id->regimplify = false;
        }

      gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);

      if (id->regimplify)
        gimple_regimplify_operands (stmt, &seq_gsi);

      /* If copy_basic_block was empty at the start of this iteration,
         call gsi_start_bb again to get at the newly added statements.  */
      if (gsi_end_p (copy_gsi))
        copy_gsi = gsi_start_bb (copy_basic_block);
      else
        gsi_next (&copy_gsi);

      /* Process the new statement.  The call to gimple_regimplify_operands
         possibly turned the statement into multiple statements; we
         need to process all of them.  */
      do
        {
          tree fn;

          stmt = gsi_stmt (copy_gsi);
          if (is_gimple_call (stmt)
              && gimple_call_va_arg_pack_p (stmt)
              && id->gimple_call)
            {
              /* __builtin_va_arg_pack () should be replaced by
                 all arguments corresponding to ... in the caller.  */
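              /* For example, if a callee declared as, say,
                   static inline int f (int x, ...)
                 contains the call bar (x, __builtin_va_arg_pack ()),
                 then inlining f (1, a, b) rewrites that call to
                 bar (x, a, b): the pack expands to the caller's
                 anonymous arguments.  */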
              tree p;
              gimple new_call;
              VEC(tree, heap) *argarray;
              size_t nargs = gimple_call_num_args (id->gimple_call);
              size_t n;

              for (p = DECL_ARGUMENTS (id->src_fn); p; p = TREE_CHAIN (p))
                nargs--;

              /* Create the new array of arguments.  */
              n = nargs + gimple_call_num_args (stmt);
              argarray = VEC_alloc (tree, heap, n);
              VEC_safe_grow (tree, heap, argarray, n);

              /* Copy all the arguments before '...'  */
              memcpy (VEC_address (tree, argarray),
                      gimple_call_arg_ptr (stmt, 0),
                      gimple_call_num_args (stmt) * sizeof (tree));

              /* Append the arguments passed in '...'  */
              memcpy (VEC_address(tree, argarray) + gimple_call_num_args (stmt),
                      gimple_call_arg_ptr (id->gimple_call, 0)
                        + (gimple_call_num_args (id->gimple_call) - nargs),
                      nargs * sizeof (tree));

              new_call = gimple_build_call_vec (gimple_call_fn (stmt),
                                                argarray);

              VEC_free (tree, heap, argarray);

              /* Copy all GIMPLE_CALL flags, location and block, except
                 GF_CALL_VA_ARG_PACK.  */
              gimple_call_copy_flags (new_call, stmt);
              gimple_call_set_va_arg_pack (new_call, false);
              gimple_set_location (new_call, gimple_location (stmt));
              gimple_set_block (new_call, gimple_block (stmt));
              gimple_call_set_lhs (new_call, gimple_call_lhs (stmt));

              gsi_replace (&copy_gsi, new_call, false);
              stmt = new_call;
            }
          else if (is_gimple_call (stmt)
                   && id->gimple_call
                   && (decl = gimple_call_fndecl (stmt))
                   && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
                   && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
            {
              /* __builtin_va_arg_pack_len () should be replaced by
                 the number of anonymous arguments.  */
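              /* Continuing the example above, inlining f (1, a, b)
                 replaces __builtin_va_arg_pack_len () with 2, the
                 number of arguments that matched the '...'.  */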
              size_t nargs = gimple_call_num_args (id->gimple_call);
              tree count, p;
              gimple new_stmt;

              for (p = DECL_ARGUMENTS (id->src_fn); p; p = TREE_CHAIN (p))
                nargs--;

              count = build_int_cst (integer_type_node, nargs);
              new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
              gsi_replace (&copy_gsi, new_stmt, false);
              stmt = new_stmt;
            }

          /* Statements produced by inlining can be unfolded, especially
             when we constant propagated some operands.  We can't fold
             them right now for two reasons:
             1) folding requires SSA_NAME_DEF_STMTs to be correct
             2) we can't change function calls to builtins.
             So we just mark the statement for later folding.  We mark
             all new statements, instead of just the statements that have
             changed by some nontrivial substitution, so that even
             statements made foldable indirectly are updated.  If this
             turns out to be expensive, copy_body can be told to watch
             for nontrivial changes.  */
          if (id->statements_to_fold)
            pointer_set_insert (id->statements_to_fold, stmt);

          /* We're duplicating a CALL_EXPR.  Find any corresponding
             callgraph edges and update or duplicate them.  */
          if (is_gimple_call (stmt))
            {
              struct cgraph_edge *edge;
              int flags;

              switch (id->transform_call_graph_edges)
                {
                case CB_CGE_DUPLICATE:
                  edge = cgraph_edge (id->src_node, orig_stmt);
                  if (edge)
                    {
                      int edge_freq = edge->frequency;
                      edge = cgraph_clone_edge (edge, id->dst_node, stmt,
                                                gimple_uid (stmt),
                                                REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
                                                edge->frequency, true);
                      /* We could also just rescale the frequency, but
                         doing so would introduce roundoff errors and make
                         the verifier unhappy.  */
                      edge->frequency
                        = compute_call_stmt_bb_frequency (id->dst_node->decl,
                                                          copy_basic_block);
                      if (dump_file
                          && profile_status_for_function (cfun) != PROFILE_ABSENT
                          && (edge_freq > edge->frequency + 10
                              || edge_freq < edge->frequency - 10))
                        {
                          fprintf (dump_file, "Edge frequency estimated by "
                                   "cgraph %i diverges from inliner's estimate %i\n",
                                   edge_freq,
                                   edge->frequency);
                          fprintf (dump_file,
                                   "Orig bb: %i, orig bb freq %i, new bb freq %i\n",
                                   bb->index,
                                   bb->frequency,
                                   copy_basic_block->frequency);
                        }
                      stmt = cgraph_redirect_edge_call_stmt_to_callee (edge);
                    }
                  break;

                case CB_CGE_MOVE_CLONES:
                  cgraph_set_call_stmt_including_clones (id->dst_node,
                                                         orig_stmt, stmt);
                  edge = cgraph_edge (id->dst_node, stmt);
                  break;

                case CB_CGE_MOVE:
                  edge = cgraph_edge (id->dst_node, orig_stmt);
                  if (edge)
                    cgraph_set_call_stmt (edge, stmt);
                  break;

                default:
                  gcc_unreachable ();
                }

              /* Constant propagation on arguments done during inlining
                 may create new direct calls.  Produce an edge for such
                 a call.  */
              if ((!edge
                   || (edge->indirect_call
                       && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
                  && is_gimple_call (stmt)
                  && (fn = gimple_call_fndecl (stmt)) != NULL)
                {
                  struct cgraph_node *dest = cgraph_node (fn);

                  /* We have a missing edge in the callgraph.  This can
                     happen when previous inlining turned an indirect call
                     into a direct call by constant propagating arguments,
                     or when we are producing a dead clone (for further
                     cloning).  In all other cases we hit a bug (incorrect
                     node sharing is the most common reason for missing
                     edges).  */
                  gcc_assert (dest->needed || !dest->analyzed
                              || !id->src_node->analyzed);
                  if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
                    cgraph_create_edge_including_clones
                      (id->dst_node, dest, orig_stmt, stmt, bb->count,
                       compute_call_stmt_bb_frequency (id->dst_node->decl,
                                                       copy_basic_block),
                       bb->loop_depth, CIF_ORIGINALLY_INDIRECT_CALL);
                  else
                    cgraph_create_edge (id->dst_node, dest, stmt,
                                        bb->count,
                                        compute_call_stmt_bb_frequency
                                          (id->dst_node->decl, copy_basic_block),
                                        bb->loop_depth)->inline_failed
                      = CIF_ORIGINALLY_INDIRECT_CALL;
                  if (dump_file)
                    {
                      fprintf (dump_file, "Created new direct edge to %s",
                               cgraph_node_name (dest));
                    }
                }

              flags = gimple_call_flags (stmt);
              if (flags & ECF_MAY_BE_ALLOCA)
                cfun->calls_alloca = true;
              if (flags & ECF_RETURNS_TWICE)
                cfun->calls_setjmp = true;
            }

          maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
                                      id->eh_map, id->eh_lp_nr);

          if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt))
            {
              ssa_op_iter i;
              tree def;

              find_new_referenced_vars (gsi_stmt (copy_gsi));
              FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
                if (TREE_CODE (def) == SSA_NAME)
                  SSA_NAME_DEF_STMT (def) = stmt;
            }

          gsi_next (&copy_gsi);
        }
      while (!gsi_end_p (copy_gsi));

      copy_gsi = gsi_last_bb (copy_basic_block);
    }

  return copy_basic_block;
}

/* Inserting a Single Entry Multiple Exit region in SSA form into code in
   SSA form is quite easy, since the dominator relationship for the old
   basic blocks does not change.

   There is, however, an exception: inlining might change the dominator
   relation across the EH edges going from a basic block within the
   inlined function to a landing pad in the function we inline into.

   The function fills in PHI_RESULTs of such PHI nodes if they refer
   to gimple regs.  Otherwise, the function marks the PHI_RESULT of such
   PHI nodes for renaming.  For non-gimple regs, renaming is safe: the
   EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
   set, and this means that there will be no overlapping live ranges
   for the underlying symbol.

   This might change in the future if we allow redirecting of EH edges;
   we might then want to change the way the CFG is built pre-inlining to
   include all the possible edges.  */
static void
update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
                                  bool can_throw, bool nonlocal_goto)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!e->dest->aux
        || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
      {
        gimple phi;
        gimple_stmt_iterator si;

        if (!nonlocal_goto)
          gcc_assert (e->flags & EDGE_EH);

        if (!can_throw)
          gcc_assert (!(e->flags & EDGE_EH));

        for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
          {
            edge re;

            phi = gsi_stmt (si);

            /* There shouldn't be any PHI nodes in the ENTRY_BLOCK.  */
            gcc_assert (!e->dest->aux);

            gcc_assert ((e->flags & EDGE_EH)
                        || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));

            if (!is_gimple_reg (PHI_RESULT (phi)))
              {
                mark_sym_for_renaming (SSA_NAME_VAR (PHI_RESULT (phi)));
                continue;
              }

            re = find_edge (ret_bb, e->dest);
            gcc_assert (re);
            gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
                        == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));

            SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
                     USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
          }
      }
}


/* Copy edges from BB into its copy constructed earlier, scale the profile
   accordingly.  Assume the aux pointers point to the copies of each BB.
   Return true if any debug stmts are left after a statement that must end
   the basic block.  */

static bool
copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb)
{
  basic_block new_bb = (basic_block) bb->aux;
  edge_iterator ei;
  edge old_edge;
  gimple_stmt_iterator si;
  int flags;
  bool need_debug_cleanup = false;

  /* Use the indices from the original blocks to create edges for the
     new ones.  */
  FOR_EACH_EDGE (old_edge, ei, bb->succs)
    if (!(old_edge->flags & EDGE_EH))
      {
        edge new_edge;

        flags = old_edge->flags;

        /* Return edges do get a FALLTHRU flag when they get inlined.  */
        if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
            && old_edge->dest->aux != EXIT_BLOCK_PTR)
          flags |= EDGE_FALLTHRU;
        new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
        new_edge->count = old_edge->count * count_scale / REG_BR_PROB_BASE;
        new_edge->probability = old_edge->probability;
      }

  if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
    return false;

  for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
    {
      gimple copy_stmt;
      bool can_throw, nonlocal_goto;

      copy_stmt = gsi_stmt (si);
      if (!is_gimple_debug (copy_stmt))
        {
          update_stmt (copy_stmt);
          if (gimple_in_ssa_p (cfun))
            mark_symbols_for_renaming (copy_stmt);
        }

      /* Do this before the possible split_block.  */
      gsi_next (&si);

      /* If this tree could throw an exception, there are two
         cases where we need to add abnormal edge(s): the
         tree wasn't in a region and there is a "current
         region" in the caller; or the original tree had
         EH edges.  In both cases split the block after the tree,
         and add abnormal edge(s) as needed; we need both
         those from the callee and the caller.
         We check whether the copy can throw, because the const
         propagation can change an INDIRECT_REF which throws
         into a COMPONENT_REF which doesn't.  If the copy
         can throw, the original could also throw.  */
      can_throw = stmt_can_throw_internal (copy_stmt);
      nonlocal_goto = stmt_can_make_abnormal_goto (copy_stmt);

      if (can_throw || nonlocal_goto)
        {
          if (!gsi_end_p (si))
            {
              while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
                gsi_next (&si);
              if (gsi_end_p (si))
                need_debug_cleanup = true;
            }
          if (!gsi_end_p (si))
            /* Note that bb's predecessor edges aren't necessarily
               right at this point; split_block doesn't care.  */
            {
              edge e = split_block (new_bb, copy_stmt);

              new_bb = e->dest;
              new_bb->aux = e->src->aux;
              si = gsi_start_bb (new_bb);
            }
        }

      if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
        make_eh_dispatch_edges (copy_stmt);
      else if (can_throw)
        make_eh_edges (copy_stmt);

      if (nonlocal_goto)
        make_abnormal_goto_edges (gimple_bb (copy_stmt), true);

      if ((can_throw || nonlocal_goto)
          && gimple_in_ssa_p (cfun))
        update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
                                          can_throw, nonlocal_goto);
    }
  return need_debug_cleanup;
}

/* Copy the PHIs.  All blocks and edges are copied, and some blocks may
   have been split, with new outgoing EH edges inserted.  BB points to the
   block of the original function and the AUX pointers link the original
   and newly copied blocks.  */

static void
copy_phis_for_bb (basic_block bb, copy_body_data *id)
{
  basic_block const new_bb = (basic_block) bb->aux;
  edge_iterator ei;
  gimple phi;
  gimple_stmt_iterator si;

  for (si = gsi_start (phi_nodes (bb)); !gsi_end_p (si); gsi_next (&si))
    {
      tree res, new_res;
      gimple new_phi;
      edge new_edge;

      phi = gsi_stmt (si);
      res = PHI_RESULT (phi);
      new_res = res;
      if (is_gimple_reg (res))
        {
          walk_tree (&new_res, copy_tree_body_r, id, NULL);
          SSA_NAME_DEF_STMT (new_res)
            = new_phi = create_phi_node (new_res, new_bb);
          FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
            {
              edge const old_edge
                = find_edge ((basic_block) new_edge->src->aux, bb);
              tree arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
              tree new_arg = arg;
              tree block = id->block;
              id->block = NULL_TREE;
              walk_tree (&new_arg, copy_tree_body_r, id, NULL);
              id->block = block;
              gcc_assert (new_arg);
              /* With return slot optimization we can end up with
                 non-gimple (foo *)&this->m, fix that here.  */
              if (TREE_CODE (new_arg) != SSA_NAME
                  && TREE_CODE (new_arg) != FUNCTION_DECL
                  && !is_gimple_val (new_arg))
                {
                  gimple_seq stmts = NULL;
                  new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
                  gsi_insert_seq_on_edge_immediate (new_edge, stmts);
                }
              add_phi_arg (new_phi, new_arg, new_edge,
                           gimple_phi_arg_location_from_edge (phi, old_edge));
            }
        }
    }
}


/* Wrapper for remap_decl so it can be used as a callback.  */

static tree
remap_decl_1 (tree decl, void *data)
{
  return remap_decl (decl, (copy_body_data *) data);
}

/* Build the struct function and associated data structures for the new
   clone NEW_FNDECL to be built.  CALLEE_FNDECL is the original.  */

static void
initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
{
  struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
  gcov_type count_scale;

  if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
    count_scale = (REG_BR_PROB_BASE * count
                   / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
  else
    count_scale = REG_BR_PROB_BASE;

  /* Register specific tree functions.  */
  gimple_register_cfg_hooks ();

  /* Get clean struct function.  */
  push_struct_function (new_fndecl);

  /* We will rebuild these, so just sanity check that they are empty.  */
  gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
  gcc_assert (cfun->local_decls == NULL);
  gcc_assert (cfun->cfg == NULL);
  gcc_assert (cfun->decl == new_fndecl);

  /* Copy items we preserve during cloning.  */
  cfun->static_chain_decl = src_cfun->static_chain_decl;
  cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
  cfun->function_end_locus = src_cfun->function_end_locus;
  cfun->curr_properties = src_cfun->curr_properties;
  cfun->last_verified = src_cfun->last_verified;
  cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
  cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
  cfun->function_frequency = src_cfun->function_frequency;
  cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
  cfun->stdarg = src_cfun->stdarg;
  cfun->dont_save_pending_sizes_p = src_cfun->dont_save_pending_sizes_p;
  cfun->after_inlining = src_cfun->after_inlining;
  cfun->returns_struct = src_cfun->returns_struct;
  cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
  cfun->after_tree_profile = src_cfun->after_tree_profile;

  init_empty_tree_cfg ();

  profile_status_for_function (cfun) = profile_status_for_function (src_cfun);
  ENTRY_BLOCK_PTR->count =
    (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
     REG_BR_PROB_BASE);
  ENTRY_BLOCK_PTR->frequency
    = ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
  EXIT_BLOCK_PTR->count =
    (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
     REG_BR_PROB_BASE);
  EXIT_BLOCK_PTR->frequency =
    EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
  if (src_cfun->eh)
    init_eh_for_function ();

  if (src_cfun->gimple_df)
    {
      init_tree_ssa (cfun);
      cfun->gimple_df->in_ssa_p = true;
      init_ssa_operands ();
    }
  pop_cfun ();
}

/* Helper function for copy_cfg_body.  Move debug stmts from the end
   of NEW_BB to the beginning of successor basic blocks when needed.  If
   a successor has multiple predecessors, reset the debug stmts' values;
   otherwise keep them.  */
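/* The move is needed because a statement that can throw or perform an
   abnormal goto must end its basic block, so debug binds copied after
   such a statement would otherwise violate that invariant.  */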

static void
maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
{
  edge e;
  edge_iterator ei;
  gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);

  if (gsi_end_p (si)
      || gsi_one_before_end_p (si)
      || !(stmt_can_throw_internal (gsi_stmt (si))
           || stmt_can_make_abnormal_goto (gsi_stmt (si))))
    return;

  FOR_EACH_EDGE (e, ei, new_bb->succs)
    {
      gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
      gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
      while (is_gimple_debug (gsi_stmt (ssi)))
        {
          gimple stmt = gsi_stmt (ssi), new_stmt;
          tree var;
          tree value;

          /* For the last edge move the debug stmts instead of copying
             them.  */
          if (ei_one_before_end_p (ei))
            {
              si = ssi;
              gsi_prev (&ssi);
              if (!single_pred_p (e->dest))
                gimple_debug_bind_reset_value (stmt);
              gsi_remove (&si, false);
              gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
              continue;
            }

          var = gimple_debug_bind_get_var (stmt);
          if (single_pred_p (e->dest))
            {
              value = gimple_debug_bind_get_value (stmt);
              value = unshare_expr (value);
            }
          else
            value = NULL_TREE;
          new_stmt = gimple_build_debug_bind (var, value, stmt);
          gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
          VEC_safe_push (gimple, heap, id->debug_stmts, new_stmt);
          gsi_prev (&ssi);
        }
    }
}

/* Make a copy of the body of FN so that it can be inserted inline in
   another function.  Walks FN via CFG, returns new fndecl.  */

static tree
copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
               basic_block entry_block_map, basic_block exit_block_map)
{
  tree callee_fndecl = id->src_fn;
  /* Original cfun for the callee, doesn't change.  */
  struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
  struct function *cfun_to_copy;
  basic_block bb;
  tree new_fndecl = NULL;
  bool need_debug_cleanup = false;
  gcov_type count_scale;
  int last;

  if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
    count_scale = (REG_BR_PROB_BASE * count
                   / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
  else
    count_scale = REG_BR_PROB_BASE;

  /* Register specific tree functions.  */
  gimple_register_cfg_hooks ();

  /* Must have a CFG here at this point.  */
  gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
              (DECL_STRUCT_FUNCTION (callee_fndecl)));

  cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);

  ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
  EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;
  entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
  exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
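
  /* The ENTRY/EXIT blocks of the original and of the copy are now linked
     to each other in both directions through their AUX fields, so the
     block- and edge-copying loops below can translate between the two
     functions' CFGs.  */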

  /* Duplicate any exception-handling regions.  */
  if (cfun->eh)
    id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
                                       remap_decl_1, id);

  /* Use aux pointers to map the original blocks to copy.  */
  FOR_EACH_BB_FN (bb, cfun_to_copy)
    {
      basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
      bb->aux = new_bb;
      new_bb->aux = bb;
    }

  last = last_basic_block;

  /* Now that we've duplicated the blocks, duplicate their edges.  */
  FOR_ALL_BB_FN (bb, cfun_to_copy)
    need_debug_cleanup |= copy_edges_for_bb (bb, count_scale, exit_block_map);

  if (gimple_in_ssa_p (cfun))
    FOR_ALL_BB_FN (bb, cfun_to_copy)
      copy_phis_for_bb (bb, id);

  FOR_ALL_BB_FN (bb, cfun_to_copy)
    {
      if (need_debug_cleanup
          && bb->index != ENTRY_BLOCK
          && bb->index != EXIT_BLOCK)
        maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
      ((basic_block)bb->aux)->aux = NULL;
      bb->aux = NULL;
    }

  /* Zero out the AUX fields of blocks newly created during EH edge
     insertion.  */
  for (; last < last_basic_block; last++)
    {
      if (need_debug_cleanup)
        maybe_move_debug_stmts_to_successors (id, BASIC_BLOCK (last));
      BASIC_BLOCK (last)->aux = NULL;
    }
  entry_block_map->aux = NULL;
  exit_block_map->aux = NULL;

  if (id->eh_map)
    {
      pointer_map_destroy (id->eh_map);
      id->eh_map = NULL;
    }

  return new_fndecl;
}

/* Copy the debug STMT using ID.  We deal with these statements in a
   special way: if any variable in their VALUE expression wasn't
   remapped yet, we won't remap it, because that would get decl uids
   out of sync, causing codegen differences between -g and -g0.  If
   this arises, we drop the VALUE expression altogether.  */

static void
copy_debug_stmt (gimple stmt, copy_body_data *id)
{
  tree t, *n;
  struct walk_stmt_info wi;

  t = id->block;
  if (gimple_block (stmt))
    {
      tree *n;
      n = (tree *) pointer_map_contains (id->decl_map, gimple_block (stmt));
      if (n)
        t = *n;
    }
  gimple_set_block (stmt, t);

  /* Remap all the operands in STMT.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = id;

  processing_debug_stmt = 1;

  t = gimple_debug_bind_get_var (stmt);

  if (TREE_CODE (t) == PARM_DECL && id->debug_map
      && (n = (tree *) pointer_map_contains (id->debug_map, t)))
    {
      gcc_assert (TREE_CODE (*n) == VAR_DECL);
      t = *n;
    }
  else if (TREE_CODE (t) == VAR_DECL
           && !TREE_STATIC (t)
           && gimple_in_ssa_p (cfun)
           && !pointer_map_contains (id->decl_map, t)
           && !var_ann (t))
    /* T is a non-localized variable.  */;
  else
    walk_tree (&t, remap_gimple_op_r, &wi, NULL);

  gimple_debug_bind_set_var (stmt, t);

  if (gimple_debug_bind_has_value_p (stmt))
    walk_tree (gimple_debug_bind_get_value_ptr (stmt),
               remap_gimple_op_r, &wi, NULL);

  /* Punt if any decl couldn't be remapped.  */
  if (processing_debug_stmt < 0)
    gimple_debug_bind_reset_value (stmt);

  processing_debug_stmt = 0;

  update_stmt (stmt);
  if (gimple_in_ssa_p (cfun))
    mark_symbols_for_renaming (stmt);
}

/* Process deferred debug stmts.  In order to give values better odds
   of being successfully remapped, we delay the processing of debug
   stmts until all other stmts that might require remapping are
   processed.  */

static void
copy_debug_stmts (copy_body_data *id)
{
  size_t i;
  gimple stmt;

  if (!id->debug_stmts)
    return;

  for (i = 0; VEC_iterate (gimple, id->debug_stmts, i, stmt); i++)
    copy_debug_stmt (stmt, id);

  VEC_free (gimple, heap, id->debug_stmts);
}

/* Make a copy of the body of SRC_FN so that it can be inserted inline in
   another function.  */

static tree
copy_tree_body (copy_body_data *id)
{
  tree fndecl = id->src_fn;
  tree body = DECL_SAVED_TREE (fndecl);

  walk_tree (&body, copy_tree_body_r, id, NULL);

  return body;
}

/* Make a copy of the body of FN so that it can be inserted inline in
   another function.  */

static tree
copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
           basic_block entry_block_map, basic_block exit_block_map)
{
  tree fndecl = id->src_fn;
  tree body;

  /* If this body has a CFG, walk CFG and copy.  */
  gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
  body = copy_cfg_body (id, count, frequency_scale, entry_block_map, exit_block_map);
  copy_debug_stmts (id);

  return body;
}

/* Return true if VALUE is an ADDR_EXPR of an automatic variable
   defined in function FN, or of a data member thereof.  */
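/* For example, &local or &local.field, where LOCAL is an automatic
   variable of FN.  */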

static bool
self_inlining_addr_expr (tree value, tree fn)
{
  tree var;

  if (TREE_CODE (value) != ADDR_EXPR)
    return false;

  var = get_base_address (TREE_OPERAND (value, 0));

  return var && auto_var_in_fn_p (var, fn);
}

/* Append to BB a debug annotation that binds VAR to VALUE, inheriting
   lexical block and line number information from BASE_STMT, if given,
   or from the last stmt of the block otherwise.  */

static gimple
insert_init_debug_bind (copy_body_data *id,
                        basic_block bb, tree var, tree value,
                        gimple base_stmt)
{
  gimple note;
  gimple_stmt_iterator gsi;
  tree tracked_var;

  if (!gimple_in_ssa_p (id->src_cfun))
    return NULL;

  if (!MAY_HAVE_DEBUG_STMTS)
    return NULL;

  tracked_var = target_for_debug_bind (var);
  if (!tracked_var)
    return NULL;

  if (bb)
    {
      gsi = gsi_last_bb (bb);
      if (!base_stmt && !gsi_end_p (gsi))
        base_stmt = gsi_stmt (gsi);
    }

  note = gimple_build_debug_bind (tracked_var, value, base_stmt);

  if (bb)
    {
      if (!gsi_end_p (gsi))
        gsi_insert_after (&gsi, note, GSI_SAME_STMT);
      else
        gsi_insert_before (&gsi, note, GSI_SAME_STMT);
    }

  return note;
}
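
/* Insert INIT_STMT, if not null, at the end of basic block BB,
   regimplifying its operands and marking new symbols for renaming;
   when debug info is enabled, also emit a debug bind recording the
   initialized variable's value.  */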
static void
insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt)
{
  /* If VAR represents a zero-sized variable, it's possible that the
     assignment statement may result in no gimple statements.  */
  if (init_stmt)
    {
      gimple_stmt_iterator si = gsi_last_bb (bb);

      /* We can end up with init statements that store to a non-register
         from a rhs with a conversion.  Handle that here by forcing the
         rhs into a temporary.  gimple_regimplify_operands is not
         prepared to do this for us.  */
      if (!is_gimple_debug (init_stmt)
          && !is_gimple_reg (gimple_assign_lhs (init_stmt))
          && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
          && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
        {
          tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
                             gimple_expr_type (init_stmt),
                             gimple_assign_rhs1 (init_stmt));
          rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
                                          GSI_NEW_STMT);
          gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
          gimple_assign_set_rhs1 (init_stmt, rhs);
        }
      gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
      gimple_regimplify_operands (init_stmt, &si);
      mark_symbols_for_renaming (init_stmt);

      if (!is_gimple_debug (init_stmt) && MAY_HAVE_DEBUG_STMTS)
        {
          tree var, def = gimple_assign_lhs (init_stmt);

          if (TREE_CODE (def) == SSA_NAME)
            var = SSA_NAME_VAR (def);
          else
            var = def;

          insert_init_debug_bind (id, bb, var, def, init_stmt);
        }
    }
}

/* Initialize parameter P with VALUE.  If needed, produce an init
   statement at the end of BB.  When BB is NULL, we return the init
   statement to be output later.  */
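/* For instance, when inlining
     static int sq (int x) { return x * x; }
   at the call sq (i), this either emits an assignment initializing the
   local copy of X from I, or, when no setup is needed in SSA form,
   simply maps X's default SSA name to I.  */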
static gimple
setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
                     basic_block bb, tree *vars)
{
  gimple init_stmt = NULL;
  tree var;
  tree rhs = value;
  tree def = (gimple_in_ssa_p (cfun)
              ? gimple_default_def (id->src_cfun, p) : NULL);

  if (value
      && value != error_mark_node
      && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
    {
      if (fold_convertible_p (TREE_TYPE (p), value))
        rhs = fold_build1 (NOP_EXPR, TREE_TYPE (p), value);
      else
        /* ???  For valid (GIMPLE) programs we should not end up here.
           Still if something has gone wrong and we end up with truly
           mismatched types here, fall back to using a VIEW_CONVERT_EXPR
           to not leak invalid GIMPLE to the following passes.  */
        rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
    }

  /* Make an equivalent VAR_DECL.  Note that we must NOT remap the type
     here since the type of this decl must be visible to the calling
     function.  */
  var = copy_decl_to_var (p, id);

  /* We're actually using the newly-created var.  */
  if (gimple_in_ssa_p (cfun) && TREE_CODE (var) == VAR_DECL)
    {
      get_var_ann (var);
      add_referenced_var (var);
    }

  /* Declare this new variable.  */
  TREE_CHAIN (var) = *vars;
  *vars = var;

  /* Make gimplifier happy about this variable.  */
  DECL_SEEN_IN_BIND_EXPR_P (var) = 1;

  /* If the parameter is never assigned to and has no SSA_NAMEs created,
     we would not need to create a new variable here at all, if it
     weren't for debug info.  Still, we can just use the argument
     value.  */
  if (TREE_READONLY (p)
      && !TREE_ADDRESSABLE (p)
      && value && !TREE_SIDE_EFFECTS (value)
      && !def)
    {
      /* We may produce non-gimple trees by adding NOPs or introduce
         invalid sharing when the operand is not really constant.
         It is not a big deal to prohibit constant propagation here, as
         we will constant propagate in the DOM1 pass anyway.  */
      if (is_gimple_min_invariant (value)
          && useless_type_conversion_p (TREE_TYPE (p),
                                        TREE_TYPE (value))
          /* We have to be very careful about ADDR_EXPR.  Make sure
             the base variable isn't a local variable of the inlined
             function, e.g., when doing recursive inlining, direct or
             mutually-recursive or whatever, which is why we don't
             just test whether fn == current_function_decl.  */
          && ! self_inlining_addr_expr (value, fn))
        {
          insert_decl_map (id, p, value);
          insert_debug_decl_map (id, p, var);
          return insert_init_debug_bind (id, bb, var, value, NULL);
        }
    }

  /* Register the VAR_DECL as the equivalent for the PARM_DECL;
     that way, when the PARM_DECL is encountered, it will be
     automatically replaced by the VAR_DECL.  */
  insert_decl_map (id, p, var);

  /* Even if P was TREE_READONLY, the new VAR should not be.
     In the original code, we would have constructed a
     temporary, and then the function body would have never
     changed the value of P.  However, now, we will be
     constructing VAR directly.  The constructor body may
     change its value multiple times as it is being
     constructed.  Therefore, it must not be TREE_READONLY;
     the back-end assumes that TREE_READONLY variable is
     assigned to only once.  */
  if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
    TREE_READONLY (var) = 0;

  /* If there is no setup required and we are in SSA, take the easy route
     of replacing all SSA names representing the function parameter by the
     SSA name passed to the function.

     We need to construct a map for the variable anyway, as it might be
     used in different SSA names when the parameter is set in the function.

     Do the replacement at -O0 for const arguments replaced by a constant.
     This is important for builtin_constant_p and other constructs requiring
     a constant argument to be visible in the inlined function body.  */
  if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
      && (optimize
          || (TREE_READONLY (p)
              && is_gimple_min_invariant (rhs)))
      && (TREE_CODE (rhs) == SSA_NAME
          || is_gimple_min_invariant (rhs))
      && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
    {
      insert_decl_map (id, def, rhs);
      return insert_init_debug_bind (id, bb, var, rhs, NULL);
    }

  /* If the value of the argument is never used, don't bother initializing
     it.  */
  if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
    {
      gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
      return insert_init_debug_bind (id, bb, var, rhs, NULL);
    }

  /* Initialize this VAR_DECL from the equivalent argument.  Convert
     the argument to the proper type in case it was promoted.  */
  if (value)
    {
      if (rhs == error_mark_node)
        {
          insert_decl_map (id, p, var);
          return insert_init_debug_bind (id, bb, var, rhs, NULL);
        }

      STRIP_USELESS_TYPE_CONVERSION (rhs);

      /* We want to use MODIFY_EXPR, not INIT_EXPR here so that we
         keep our trees in gimple form.  */
      if (def && gimple_in_ssa_p (cfun) && is_gimple_reg (p))
        {
          def = remap_ssa_name (def, id);
          init_stmt = gimple_build_assign (def, rhs);
          SSA_NAME_IS_DEFAULT_DEF (def) = 0;
          set_default_def (var, NULL);
        }
      else
        init_stmt = gimple_build_assign (var, rhs);

      if (bb && init_stmt)
        insert_init_stmt (id, bb, init_stmt);
    }
  return init_stmt;
}

/* Generate code to initialize the parameters of the function at the
   top of the stack in ID from the GIMPLE_CALL STMT.  */

static void
initialize_inlined_parameters (copy_body_data *id, gimple stmt,
                               tree fn, basic_block bb)
{
  tree parms;
  size_t i;
  tree p;
  tree vars = NULL_TREE;
  tree static_chain = gimple_call_chain (stmt);

  /* Figure out what the parameters are.  */
  parms = DECL_ARGUMENTS (fn);

  /* Loop through the parameter declarations, replacing each with an
     equivalent VAR_DECL, appropriately initialized.  */
  for (p = parms, i = 0; p; p = TREE_CHAIN (p), i++)
    {
      tree val;
      val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
      setup_one_parameter (id, p, val, fn, bb, &vars);
    }

  /* Initialize the static chain.  */
  p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
  gcc_assert (fn != current_function_decl);
  if (p)
    {
      /* No static chain?  Seems like a bug in tree-nested.c.  */
      gcc_assert (static_chain);

      setup_one_parameter (id, p, static_chain, fn, bb, &vars);
    }

  declare_inline_vars (id->block, vars);
}


/* Declare a return variable to replace the RESULT_DECL for the
   function we are calling.  An appropriate DECL_STMT is returned.
   The USE_STMT is filled to contain a use of the declaration to
   indicate the return value of the function.

   RETURN_SLOT, if non-null, is the place where to store the result.  It
   is set only for CALL_EXPR_RETURN_SLOT_OPT.  MODIFY_DEST, if non-null,
   was the LHS of the MODIFY_EXPR to which this call is the RHS.

   The return value is a (possibly null) value that holds the result
   as seen by the caller.  */
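/* For instance, for a call y = f () that uses no return slot,
   MODIFY_DEST is Y; if Y can safely stand in for the callee's
   RESULT_DECL it is reused directly, otherwise a fresh local variable
   is created and returned as the use seen by the caller.  */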

static tree
declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest)
{
  tree callee = id->src_fn;
  tree caller = id->dst_fn;
  tree result = DECL_RESULT (callee);
  tree callee_type = TREE_TYPE (result);
  tree caller_type;
  tree var, use;

  /* Handle type-mismatches in the function declaration return type
     vs. the call expression.  */
  if (modify_dest)
    caller_type = TREE_TYPE (modify_dest);
  else
    caller_type = TREE_TYPE (TREE_TYPE (callee));

  /* We don't need to do anything for functions that don't return
     anything.  */
  if (!result || VOID_TYPE_P (callee_type))
    return NULL_TREE;

  /* If there was a return slot, then the return value is the
     dereferenced address of that object.  */
  if (return_slot)
    {
      /* The front end shouldn't have used both return_slot and
         a modify expression.  */
      gcc_assert (!modify_dest);
      if (DECL_BY_REFERENCE (result))
        {
          tree return_slot_addr = build_fold_addr_expr (return_slot);
          STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);

          /* We are going to construct *&return_slot and we can't do that
             for variables not believed to be addressable.

             FIXME: This check can possibly match, because values returned
             via return slot optimization are not believed to have their
             address taken by alias analysis.  */
          gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
          if (gimple_in_ssa_p (cfun))
            {
              HOST_WIDE_INT bitsize;
              HOST_WIDE_INT bitpos;
              tree offset;
              enum machine_mode mode;
              int unsignedp;
              int volatilep;
              tree base;
              base = get_inner_reference (return_slot, &bitsize, &bitpos,
                                          &offset,
                                          &mode, &unsignedp, &volatilep,
                                          false);
              if (TREE_CODE (base) == INDIRECT_REF)
                base = TREE_OPERAND (base, 0);
              if (TREE_CODE (base) == SSA_NAME)
                base = SSA_NAME_VAR (base);
              mark_sym_for_renaming (base);
            }
          var = return_slot_addr;
        }
      else
        {
          var = return_slot;
          gcc_assert (TREE_CODE (var) != SSA_NAME);
          TREE_ADDRESSABLE (var) |= TREE_ADDRESSABLE (result);
        }
      if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
           || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
          && !DECL_GIMPLE_REG_P (result)
          && DECL_P (var))
        DECL_GIMPLE_REG_P (var) = 0;
      use = NULL;
      goto done;
    }

  /* All types requiring non-trivial constructors should have been handled.  */
  gcc_assert (!TREE_ADDRESSABLE (callee_type));

  /* Attempt to avoid creating a new temporary variable.  */
  if (modify_dest
      && TREE_CODE (modify_dest) != SSA_NAME)
    {
      bool use_it = false;

      /* We can't use MODIFY_DEST if there's type promotion involved.  */
      if (!useless_type_conversion_p (callee_type, caller_type))
        use_it = false;

      /* ??? If we're assigning to a variable sized type, then we must
         reuse the destination variable, because we've no good way to
         create variable sized temporaries at this point.  */
      else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
        use_it = true;

      /* If the callee cannot possibly modify MODIFY_DEST, then we can
         reuse it as the result of the call directly.  Don't do this if
         it would promote MODIFY_DEST to addressable.  */
      else if (TREE_ADDRESSABLE (result))
        use_it = false;
      else
        {
          tree base_m = get_base_address (modify_dest);

          /* If the base isn't a decl, then it's a pointer, and we don't
             know where that's going to go.  */
          if (!DECL_P (base_m))
            use_it = false;
          else if (is_global_var (base_m))
            use_it = false;
          else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
                    || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
                   && !DECL_GIMPLE_REG_P (result)
                   && DECL_GIMPLE_REG_P (base_m))
            use_it = false;
          else if (!TREE_ADDRESSABLE (base_m))
            use_it = true;
        }

      if (use_it)
        {
          var = modify_dest;
          use = NULL;
          goto done;
        }
    }

  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);

  var = copy_result_decl_to_var (result, id);
  if (gimple_in_ssa_p (cfun))
    {
      get_var_ann (var);
      add_referenced_var (var);
    }

  DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
  DECL_STRUCT_FUNCTION (caller)->local_decls
    = tree_cons (NULL_TREE, var,
                 DECL_STRUCT_FUNCTION (caller)->local_decls);

  /* Do not have the rest of GCC warn about this variable as it should
     not be visible to the user.  */
  TREE_NO_WARNING (var) = 1;

  declare_inline_vars (id->block, var);

  /* Build the use expr.  If the return type of the function was
     promoted, convert it back to the expected type.  */
  use = var;
  if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
    use = fold_convert (caller_type, var);

  STRIP_USELESS_TYPE_CONVERSION (use);

  if (DECL_BY_REFERENCE (result))
    {
      TREE_ADDRESSABLE (var) = 1;
      var = build_fold_addr_expr (var);
    }

 done:
  /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
     way, when the RESULT_DECL is encountered, it will be
     automatically replaced by the VAR_DECL.  */
  insert_decl_map (id, result, var);

  /* Remember this so we can ignore it in remap_decls.  */
  id->retvar = var;

  return use;
}

/* Callback through walk_tree.  Determine if a DECL_INITIAL makes reference
   to a local label.  */
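/* For example, a function-local
     static void *p = &&lab;
   stores the address of the local label LAB in static storage.  */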

static tree
has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp)
{
  tree node = *nodep;
  tree fn = (tree) fnp;

  if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
    return node;

  if (TYPE_P (node))
    *walk_subtrees = 0;

  return NULL_TREE;
}

/* Determine if the function can be copied.  If so return NULL.  If
   not return a string describing the reason for failure.  */

static const char *
copy_forbidden (struct function *fun, tree fndecl)
{
  const char *reason = fun->cannot_be_copied_reason;
  tree step;

  /* Only examine the function once.  */
  if (fun->cannot_be_copied_set)
    return reason;

  /* We cannot copy a function that receives a non-local goto
     because we cannot remap the destination label used in the
     function that is performing the non-local goto.  */
  /* ??? Actually, this should be possible, if we work at it.
     No doubt there's just a handful of places that simply
     assume it doesn't happen and don't substitute properly.  */
  if (fun->has_nonlocal_label)
    {
      reason = G_("function %q+F can never be copied "
                  "because it receives a non-local goto");
      goto fail;
    }

  for (step = fun->local_decls; step; step = TREE_CHAIN (step))
    {
      tree decl = TREE_VALUE (step);

      if (TREE_CODE (decl) == VAR_DECL
          && TREE_STATIC (decl)
          && !DECL_EXTERNAL (decl)
          && DECL_INITIAL (decl)
          && walk_tree_without_duplicates (&DECL_INITIAL (decl),
                                           has_label_address_in_static_1,
                                           fndecl))
        {
          reason = G_("function %q+F can never be copied because it saves "
                      "address of local label in a static variable");
          goto fail;
        }
    }

 fail:
  fun->cannot_be_copied_reason = reason;
  fun->cannot_be_copied_set = true;
  return reason;
}


static const char *inline_forbidden_reason;

/* A callback for walk_gimple_seq to handle statements.  Returns non-null
   iff a function cannot be inlined.  Also sets the reason why.  */

static tree
inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
                         struct walk_stmt_info *wip)
{
  tree fn = (tree) wip->info;
  tree t;
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      /* Refuse to inline an alloca call unless the user explicitly forced
         it, since this may drastically change the program's memory
         overhead when the function using alloca is called in a loop.  In
         the GCC present in SPEC2000, inlining into schedule_block caused
         it to require 2GB of RAM instead of 256MB.  */
      if (gimple_alloca_call_p (stmt)
          && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
        {
          inline_forbidden_reason
            = G_("function %q+F can never be inlined because it uses "
                 "alloca (override using the always_inline attribute)");
          *handled_ops_p = true;
          return fn;
        }
2899
 
2900
      t = gimple_call_fndecl (stmt);
2901
      if (t == NULL_TREE)
2902
        break;
2903
 
2904
      /* We cannot inline functions that call setjmp.  */
2905
      if (setjmp_call_p (t))
2906
        {
2907
          inline_forbidden_reason
2908
            = G_("function %q+F can never be inlined because it uses setjmp");
2909
          *handled_ops_p = true;
2910
          return t;
2911
        }
2912
 
2913
      if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
2914
        switch (DECL_FUNCTION_CODE (t))
2915
          {
2916
            /* We cannot inline functions that take a variable number of
2917
               arguments.  */
2918
          case BUILT_IN_VA_START:
2919
          case BUILT_IN_NEXT_ARG:
2920
          case BUILT_IN_VA_END:
2921
            inline_forbidden_reason
2922
              = G_("function %q+F can never be inlined because it "
2923
                   "uses variable argument lists");
2924
            *handled_ops_p = true;
2925
            return t;
2926
 
2927
          case BUILT_IN_LONGJMP:
2928
            /* We can't inline functions that call __builtin_longjmp at
2929
               all.  The non-local goto machinery really requires the
2930
               destination be in a different function.  If we allow the
2931
               function calling __builtin_longjmp to be inlined into the
2932
               function calling __builtin_setjmp, Things will Go Awry.  */
2933
            inline_forbidden_reason
2934
              = G_("function %q+F can never be inlined because "
2935
                   "it uses setjmp-longjmp exception handling");
2936
            *handled_ops_p = true;
2937
            return t;
2938
 
2939
          case BUILT_IN_NONLOCAL_GOTO:
2940
            /* Similarly.  */
2941
            inline_forbidden_reason
2942
              = G_("function %q+F can never be inlined because "
2943
                   "it uses non-local goto");
2944
            *handled_ops_p = true;
2945
            return t;
2946
 
2947
          case BUILT_IN_RETURN:
2948
          case BUILT_IN_APPLY_ARGS:
2949
            /* If a __builtin_apply_args caller would be inlined,
2950
               it would be saving arguments of the function it has
2951
               been inlined into.  Similarly __builtin_return would
2952
               return from the function the inline has been inlined into.  */
2953
            inline_forbidden_reason
2954
              = G_("function %q+F can never be inlined because "
2955
                   "it uses __builtin_return or __builtin_apply_args");
2956
            *handled_ops_p = true;
2957
            return t;
2958
 
2959
          default:
2960
            break;
2961
          }
2962
      break;
2963
 
2964
    case GIMPLE_GOTO:
2965
      t = gimple_goto_dest (stmt);
2966
 
2967
      /* We will not inline a function which uses computed goto.  The
2968
         addresses of its local labels, which may be tucked into
2969
         global storage, are of course not constant across
2970
         instantiations, which causes unexpected behavior.  */
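      /* For illustration (hypothetical example): a dispatch loop such as
             static const void *tab[] = { &&op_add, &&op_sub };
             goto *tab[opcode];
         jumps through label addresses that would not survive being
         remapped into a different function body, so any GIMPLE_GOTO
         whose destination is not a plain LABEL_DECL is rejected here.  */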
      if (TREE_CODE (t) != LABEL_DECL)
        {
          inline_forbidden_reason
            = G_("function %q+F can never be inlined "
                 "because it contains a computed goto");
          *handled_ops_p = true;
          return t;
        }
      break;

    default:
      break;
    }

  *handled_ops_p = false;
  return NULL_TREE;
}

/* Return true if FNDECL is a function that cannot be inlined into
   another one.  */

static bool
inline_forbidden_p (tree fndecl)
{
  struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
  struct walk_stmt_info wi;
  struct pointer_set_t *visited_nodes;
  basic_block bb;
  bool forbidden_p = false;

  /* First check for shared reasons not to copy the code.  */
  inline_forbidden_reason = copy_forbidden (fun, fndecl);
  if (inline_forbidden_reason != NULL)
    return true;

  /* Next, walk the statements of the function looking for
     constructs we cannot handle, or that are non-optimal for inlining.  */
  visited_nodes = pointer_set_create ();
  memset (&wi, 0, sizeof (wi));
  wi.info = (void *) fndecl;
  wi.pset = visited_nodes;

  FOR_EACH_BB_FN (bb, fun)
    {
      gimple ret;
      gimple_seq seq = bb_seq (bb);
      ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
      forbidden_p = (ret != NULL);
      if (forbidden_p)
        break;
    }

  pointer_set_destroy (visited_nodes);
  return forbidden_p;
}

/* Returns nonzero if FN is a function that does not have any
   fundamental inline blocking properties.  */

bool
tree_inlinable_function_p (tree fn)
{
  bool inlinable = true;
  bool do_warning;
  tree always_inline;

  /* If we've already decided this function shouldn't be inlined,
     there's no need to check again.  */
  if (DECL_UNINLINABLE (fn))
    return false;

  /* We only warn for functions declared `inline' by the user.  */
  do_warning = (warn_inline
                && DECL_DECLARED_INLINE_P (fn)
                && !DECL_NO_INLINE_WARNING_P (fn)
                && !DECL_IN_SYSTEM_HEADER (fn));

  always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));

  if (flag_no_inline
      && always_inline == NULL)
    {
      if (do_warning)
        warning (OPT_Winline, "function %q+F can never be inlined because it "
                 "is suppressed using -fno-inline", fn);
      inlinable = false;
    }

  /* Don't auto-inline anything that might not be bound within
     this unit of translation.  */
  else if (!DECL_DECLARED_INLINE_P (fn)
           && DECL_REPLACEABLE_P (fn))
    inlinable = false;

  else if (!function_attribute_inlinable_p (fn))
    {
      if (do_warning)
        warning (OPT_Winline, "function %q+F can never be inlined because it "
                 "uses attributes conflicting with inlining", fn);
      inlinable = false;
    }

  else if (inline_forbidden_p (fn))
    {
      /* See if we should warn about uninlinable functions.  Previously,
         some of these warnings would be issued while trying to expand
         the function inline, but that would cause multiple warnings
         about functions that would for example call alloca.  But since
         this is a property of the function, just one warning is enough.
         As a bonus we can now give more details about the reason why a
         function is not inlinable.  */
      if (always_inline)
        sorry (inline_forbidden_reason, fn);
      else if (do_warning)
        warning (OPT_Winline, inline_forbidden_reason, fn);

      inlinable = false;
    }

  /* Squirrel away the result so that we don't have to check again.  */
  DECL_UNINLINABLE (fn) = !inlinable;

  return inlinable;
}
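
/* For illustration (hypothetical example): under -fno-inline a plain
       static inline int add1 (int x) { return x + 1; }
   is rejected by the first test above, while
       static inline int add1 (int x) __attribute__ ((always_inline));
   survives it, because a non-null "always_inline" attribute lookup
   disables the flag_no_inline check.  */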

/* Estimate the cost of a memory move.  Use machine-dependent
   word size and take a possible memcpy call into account.  */

int
estimate_move_cost (tree type)
{
  HOST_WIDE_INT size;

  gcc_assert (!VOID_TYPE_P (type));

  size = int_size_in_bytes (type);

  if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (!optimize_size))
    /* Cost of a memcpy call, 3 arguments and the call.  */
    return 4;
  else
    return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
}
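
/* Worked example (with hypothetical target values): if MOVE_MAX_PIECES
   is 8 and MOVE_RATIO (!optimize_size) is 2, then a 12-byte struct
   costs (12 + 8 - 1) / 8 = 2 word moves, while a 24-byte struct
   exceeds the 16-byte piecewise-move limit and is charged the flat
   memcpy cost of 4.  */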

/* Returns the cost of operation CODE, according to WEIGHTS.  */

static int
estimate_operator_cost (enum tree_code code, eni_weights *weights,
                        tree op1 ATTRIBUTE_UNUSED, tree op2)
{
  switch (code)
    {
    /* These are "free" conversions, or their presumed cost
       is folded into other operations.  */
    case RANGE_EXPR:
    CASE_CONVERT:
    case COMPLEX_EXPR:
    case PAREN_EXPR:
      return 0;

    /* Assign cost of 1 to usual operations.
       ??? We may consider mapping RTL costs to this.  */
    case COND_EXPR:
    case VEC_COND_EXPR:

    case PLUS_EXPR:
    case POINTER_PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:

    case ADDR_SPACE_CONVERT_EXPR:
    case FIXED_CONVERT_EXPR:
    case FIX_TRUNC_EXPR:

    case NEGATE_EXPR:
    case FLOAT_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case ABS_EXPR:

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case VEC_LSHIFT_EXPR:
    case VEC_RSHIFT_EXPR:

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case BIT_NOT_EXPR:

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case TRUTH_NOT_EXPR:

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:

    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:

    case CONJ_EXPR:

    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:

    case REALIGN_LOAD_EXPR:

    case REDUC_MAX_EXPR:
    case REDUC_MIN_EXPR:
    case REDUC_PLUS_EXPR:
    case WIDEN_SUM_EXPR:
    case WIDEN_MULT_EXPR:
    case DOT_PROD_EXPR:

    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_EXTRACT_EVEN_EXPR:
    case VEC_EXTRACT_ODD_EXPR:
    case VEC_INTERLEAVE_HIGH_EXPR:
    case VEC_INTERLEAVE_LOW_EXPR:

      return 1;

    /* A few special cases of expensive operations.  This is useful
       for avoiding inlining of functions having too many of these.  */
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
      if (TREE_CODE (op2) != INTEGER_CST)
        return weights->div_mod_cost;
      return 1;

    default:
      /* We expect a copy assignment with no operator.  */
      gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
      return 0;
    }
}
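
/* Worked example: with the time-based weights set up in
   init_inline_once below (div_mod_cost == 10), a statement "x = a / b"
   with a non-constant divisor is charged 10, while "x = a / 8" is
   charged 1, the assumption being that division by an INTEGER_CST is
   usually strength-reduced to cheaper operations.  */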


/* Estimate number of instructions that will be created by expanding
   the statements in the statement sequence STMTS.
   WEIGHTS contains weights attributed to various constructs.  */

static
int estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
{
  int cost;
  gimple_stmt_iterator gsi;

  cost = 0;
  for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
    cost += estimate_num_insns (gsi_stmt (gsi), weights);

  return cost;
}


/* Estimate number of instructions that will be created by expanding STMT.
   WEIGHTS contains weights attributed to various constructs.  */

int
estimate_num_insns (gimple stmt, eni_weights *weights)
{
  unsigned cost, i;
  enum gimple_code code = gimple_code (stmt);
  tree lhs;
  tree rhs;

  switch (code)
    {
    case GIMPLE_ASSIGN:
      /* Try to estimate the cost of assignments.  We have two cases to
         deal with:
         1) Simple assignments to registers;
         2) Stores to things that must live in memory.  This includes
            "normal" stores to scalars, but also assignments of large
            structures, or constructors of big arrays.

         Let us look at these two cases, assuming we have "a = b + C":
         <GIMPLE_ASSIGN <var_decl "a">
                <plus_expr <var_decl "b"> <constant C>>
         If "a" is a GIMPLE register, the assignment to it is free on almost
         any target, because "a" usually ends up in a real register.  Hence
         the only cost of this expression comes from the PLUS_EXPR, and we
         can ignore the GIMPLE_ASSIGN.
         If "a" is not a GIMPLE register, the assignment to "a" will most
         likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
         of moving something into "a", which we compute using the function
         estimate_move_cost.  */
      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);

      if (is_gimple_reg (lhs))
        cost = 0;
      else
        cost = estimate_move_cost (TREE_TYPE (lhs));

      if (!is_gimple_reg (rhs) && !is_gimple_min_invariant (rhs))
        cost += estimate_move_cost (TREE_TYPE (rhs));

      cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
                                      gimple_assign_rhs1 (stmt),
                                      get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
                                      == GIMPLE_BINARY_RHS
                                      ? gimple_assign_rhs2 (stmt) : NULL);
      break;

    case GIMPLE_COND:
      cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
                                         gimple_op (stmt, 0),
                                         gimple_op (stmt, 1));
      break;

    case GIMPLE_SWITCH:
      /* Take into account the cost of the switch + guess 2 conditional
         jumps for each case label.

         TODO: once the switch expansion logic is sufficiently separated,
         we can do a better job of estimating the cost of the switch.  */
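      /* Worked example: a switch with 16 case labels is estimated at
         floor_log2 (16) * 2 == 8 under the time-based weights (a
         balanced decision tree needs about log2 comparisons), but at
         16 * 2 == 32 under the size-based weights, which assume every
         label contributes code.  */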
      if (weights->time_based)
        cost = floor_log2 (gimple_switch_num_labels (stmt)) * 2;
      else
        cost = gimple_switch_num_labels (stmt) * 2;
      break;

    case GIMPLE_CALL:
      {
        tree decl = gimple_call_fndecl (stmt);
        tree addr = gimple_call_fn (stmt);
        tree funtype = TREE_TYPE (addr);

        if (POINTER_TYPE_P (funtype))
          funtype = TREE_TYPE (funtype);

        if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
          cost = weights->target_builtin_call_cost;
        else
          cost = weights->call_cost;

        if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
          switch (DECL_FUNCTION_CODE (decl))
            {
            case BUILT_IN_CONSTANT_P:
              return 0;
            case BUILT_IN_EXPECT:
              return 0;

            /* Prefetch instruction is not expensive.  */
            case BUILT_IN_PREFETCH:
              cost = weights->target_builtin_call_cost;
              break;

            /* Exception state returns or moves registers around.  */
            case BUILT_IN_EH_FILTER:
            case BUILT_IN_EH_POINTER:
            case BUILT_IN_EH_COPY_VALUES:
              return 0;

            default:
              break;
            }

        if (decl)
          funtype = TREE_TYPE (decl);

        if (!VOID_TYPE_P (TREE_TYPE (funtype)))
          cost += estimate_move_cost (TREE_TYPE (funtype));
        /* Our cost must be kept in sync with
           cgraph_estimate_size_after_inlining, which uses the function
           declaration to figure out the arguments.  */
        if (decl && DECL_ARGUMENTS (decl))
          {
            tree arg;
            for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
              if (!VOID_TYPE_P (TREE_TYPE (arg)))
                cost += estimate_move_cost (TREE_TYPE (arg));
          }
        else if (funtype && prototype_p (funtype))
          {
            tree t;
            for (t = TYPE_ARG_TYPES (funtype); t && t != void_list_node;
                 t = TREE_CHAIN (t))
              if (!VOID_TYPE_P (TREE_VALUE (t)))
                cost += estimate_move_cost (TREE_VALUE (t));
          }
        else
          {
            for (i = 0; i < gimple_call_num_args (stmt); i++)
              {
                tree arg = gimple_call_arg (stmt, i);
                if (!VOID_TYPE_P (TREE_TYPE (arg)))
                  cost += estimate_move_cost (TREE_TYPE (arg));
              }
          }

        break;
      }
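
    /* Worked example: with the time-based weights below (call_cost == 10),
       a call "int r = f (a, b)" to a known declaration with two int
       parameters is estimated at 10 + 1 (return move) + 1 + 1 (argument
       moves) == 13, assuming word-sized ints so that each
       estimate_move_cost above evaluates to 1.  */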

    case GIMPLE_GOTO:
    case GIMPLE_LABEL:
    case GIMPLE_NOP:
    case GIMPLE_PHI:
    case GIMPLE_RETURN:
    case GIMPLE_PREDICT:
    case GIMPLE_DEBUG:
      return 0;

    case GIMPLE_ASM:
      return asm_str_count (gimple_asm_string (stmt));

    case GIMPLE_RESX:
      /* This is either going to be an external function call with one
         argument, or two register copy statements plus a goto.  */
      return 2;

    case GIMPLE_EH_DISPATCH:
      /* ??? This is going to turn into a switch statement.  Ideally
         we'd have a look at the eh region and estimate the number of
         edges involved.  */
      return 10;

    case GIMPLE_BIND:
      return estimate_num_insns_seq (gimple_bind_body (stmt), weights);

    case GIMPLE_EH_FILTER:
      return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);

    case GIMPLE_CATCH:
      return estimate_num_insns_seq (gimple_catch_handler (stmt), weights);

    case GIMPLE_TRY:
      return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
              + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));

    /* OpenMP directives are generally very expensive.  */

    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_ATOMIC_STORE:
    case GIMPLE_OMP_CONTINUE:
      /* ...except these, which are cheap.  */
      return 0;

    case GIMPLE_OMP_ATOMIC_LOAD:
      return weights->omp_cost;

    case GIMPLE_OMP_FOR:
      return (weights->omp_cost
              + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
              + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
      return (weights->omp_cost
              + estimate_num_insns_seq (gimple_omp_body (stmt), weights));

    default:
      gcc_unreachable ();
    }

  return cost;
}

/* Estimate number of instructions that will be created by expanding
   function FNDECL.  WEIGHTS contains weights attributed to various
   constructs.  */

int
estimate_num_insns_fn (tree fndecl, eni_weights *weights)
{
  struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
  gimple_stmt_iterator bsi;
  basic_block bb;
  int n = 0;

  gcc_assert (my_function && my_function->cfg);
  FOR_EACH_BB_FN (bb, my_function)
    {
      for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
        n += estimate_num_insns (gsi_stmt (bsi), weights);
    }

  return n;
}


/* Initializes weights used by estimate_num_insns.  */

void
init_inline_once (void)
{
  eni_size_weights.call_cost = 1;
  eni_size_weights.target_builtin_call_cost = 1;
  eni_size_weights.div_mod_cost = 1;
  eni_size_weights.omp_cost = 40;
  eni_size_weights.time_based = false;

  /* Estimating time for a call is difficult, since we have no idea what
     the called function does.  In the current uses of eni_time_weights,
     underestimating the cost does less harm than overestimating it, so
     we choose a rather small value here.  */
  eni_time_weights.call_cost = 10;
  eni_time_weights.target_builtin_call_cost = 10;
  eni_time_weights.div_mod_cost = 10;
  eni_time_weights.omp_cost = 40;
  eni_time_weights.time_based = true;
}

/* Estimate the number of instructions in a gimple_seq.  */

int
count_insns_seq (gimple_seq seq, eni_weights *weights)
{
  gimple_stmt_iterator gsi;
  int n = 0;
  for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
    n += estimate_num_insns (gsi_stmt (gsi), weights);

  return n;
}


/* Install new lexical TREE_BLOCK underneath 'current_block'.  */

static void
prepend_lexical_block (tree current_block, tree new_block)
{
  BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
  BLOCK_SUBBLOCKS (current_block) = new_block;
  BLOCK_SUPERCONTEXT (new_block) = current_block;
}
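
/* For illustration: if CURRENT_BLOCK's subblock chain is B1 -> B2, then
   after prepend_lexical_block (CURRENT_BLOCK, NEW) it reads
   NEW -> B1 -> B2, with BLOCK_SUPERCONTEXT (NEW) == CURRENT_BLOCK;
   the new block is pushed on the front rather than appended.  */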

/* Fetch the callee declaration from the call graph edge going from NODE
   and associated with the call statement STMT.  Return NULL_TREE if not
   found.  */
static tree
get_indirect_callee_fndecl (struct cgraph_node *node, gimple stmt)
{
  struct cgraph_edge *cs;

  cs = cgraph_edge (node, stmt);
  if (cs)
    return cs->callee->decl;

  return NULL_TREE;
}

/* If STMT is a GIMPLE_CALL, replace it with its inline expansion.  */

static bool
expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
{
  tree use_retvar;
  tree fn;
  struct pointer_map_t *st, *dst;
  tree return_slot;
  tree modify_dest;
  location_t saved_location;
  struct cgraph_edge *cg_edge;
  cgraph_inline_failed_t reason;
  basic_block return_block;
  edge e;
  gimple_stmt_iterator gsi, stmt_gsi;
  bool successfully_inlined = FALSE;
  bool purge_dead_abnormal_edges;
  tree t_step;
  tree var;

  /* Set input_location here so we get the right instantiation context
     if we call instantiate_decl from inlinable_function_p.  */
  saved_location = input_location;
  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  /* From here on, we're only interested in CALL_EXPRs.  */
  if (gimple_code (stmt) != GIMPLE_CALL)
    goto egress;

  /* First, see if we can figure out what function is being called.
     If we cannot, then there is no hope of inlining the function.  */
  fn = gimple_call_fndecl (stmt);
  if (!fn)
    {
      fn = get_indirect_callee_fndecl (id->dst_node, stmt);
      if (!fn)
        goto egress;
    }

  /* Turn forward declarations into real ones.  */
  fn = cgraph_node (fn)->decl;

  /* If FN is a declaration of a function in a nested scope that was
     globally declared inline, we don't set its DECL_INITIAL.
     However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
     C++ front-end uses it for cdtors to refer to their internal
     declarations, that are not real functions.  Fortunately those
     don't have trees to be saved, so we can tell by checking their
     gimple_body.  */
  if (!DECL_INITIAL (fn)
      && DECL_ABSTRACT_ORIGIN (fn)
      && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
    fn = DECL_ABSTRACT_ORIGIN (fn);

  /* Objective C and Fortran still call tree_rest_of_compilation directly.
     Kill this check once this is fixed.  */
  if (!id->dst_node->analyzed)
    goto egress;

  cg_edge = cgraph_edge (id->dst_node, stmt);

  /* Don't inline functions with different EH personalities.  */
  if (DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
      && DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl)
      && (DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
          != DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl)))
    goto egress;

  /* Don't try to inline functions that are not well-suited to
     inlining.  */
  if (!cgraph_inline_p (cg_edge, &reason))
    {
      /* If this call was originally indirect, we do not want to emit any
         inlining-related warnings or sorry messages because there are no
         guarantees regarding those.  */
      if (cg_edge->indirect_call)
        goto egress;

      if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
          /* Avoid warnings during early inline pass. */
          && cgraph_global_info_ready)
        {
          sorry ("inlining failed in call to %q+F: %s", fn,
                 cgraph_inline_failed_string (reason));
          sorry ("called from here");
        }
      else if (warn_inline && DECL_DECLARED_INLINE_P (fn)
               && !DECL_IN_SYSTEM_HEADER (fn)
               && reason != CIF_UNSPECIFIED
               && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
               /* Avoid warnings during early inline pass. */
               && cgraph_global_info_ready)
        {
          warning (OPT_Winline, "inlining failed in call to %q+F: %s",
                   fn, cgraph_inline_failed_string (reason));
          warning (OPT_Winline, "called from here");
        }
      goto egress;
    }
  fn = cg_edge->callee->decl;

#ifdef ENABLE_CHECKING
  if (cg_edge->callee->decl != id->dst_node->decl)
    verify_cgraph_node (cg_edge->callee);
#endif

  /* We will be inlining this callee.  */
  id->eh_lp_nr = lookup_stmt_eh_lp (stmt);

  /* Update the caller's EH personality.  */
  if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))
    DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
      = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl);

  /* Split the block holding the GIMPLE_CALL.  */
  e = split_block (bb, stmt);
  bb = e->src;
  return_block = e->dest;
  remove_edge (e);

  /* split_block splits after the statement; work around this by
     moving the call into the second block manually.  Not pretty,
     but seems easier than doing the CFG manipulation by hand
     when the GIMPLE_CALL is in the last statement of BB.  */
  stmt_gsi = gsi_last_bb (bb);
  gsi_remove (&stmt_gsi, false);

  /* If the GIMPLE_CALL was in the last statement of BB, it may have
     been the source of abnormal edges.  In this case, schedule
     the removal of dead abnormal edges.  */
  gsi = gsi_start_bb (return_block);
  if (gsi_end_p (gsi))
    {
      gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
      purge_dead_abnormal_edges = true;
    }
  else
    {
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      purge_dead_abnormal_edges = false;
    }

  stmt_gsi = gsi_start_bb (return_block);

  /* Build a block containing code to initialize the arguments, the
     actual inline expansion of the body, and a label for the return
     statements within the function to jump to.  The type of the
     statement expression is the return type of the function call.  */
  id->block = make_node (BLOCK);
  BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
  BLOCK_SOURCE_LOCATION (id->block) = input_location;
  prepend_lexical_block (gimple_block (stmt), id->block);

  /* Local declarations will be replaced by their equivalents in this
     map.  */
  st = id->decl_map;
  id->decl_map = pointer_map_create ();
  dst = id->debug_map;
  id->debug_map = NULL;

  /* Record the function we are about to inline.  */
  id->src_fn = fn;
  id->src_node = cg_edge->callee;
  id->src_cfun = DECL_STRUCT_FUNCTION (fn);
  id->gimple_call = stmt;

  gcc_assert (!id->src_cfun->after_inlining);

  id->entry_bb = bb;
  if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
    {
      gimple_stmt_iterator si = gsi_last_bb (bb);
      gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
                                                   NOT_TAKEN),
                        GSI_NEW_STMT);
    }
  initialize_inlined_parameters (id, stmt, fn, bb);

  if (DECL_INITIAL (fn))
    prepend_lexical_block (id->block, remap_blocks (DECL_INITIAL (fn), id));

  /* Return statements in the function body will be replaced by jumps
     to the RET_LABEL.  */
  gcc_assert (DECL_INITIAL (fn));
  gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);

  /* Find the LHS to which the result of this call is assigned.  */
  return_slot = NULL;
  if (gimple_call_lhs (stmt))
    {
      modify_dest = gimple_call_lhs (stmt);

      /* The function which we are inlining might not return a value,
         in which case we should issue a warning that the function
         does not return a value.  In that case the optimizers will
         see that the variable to which the value is assigned was not
         initialized.  We do not want to issue a warning about that
         uninitialized variable.  */
      if (DECL_P (modify_dest))
        TREE_NO_WARNING (modify_dest) = 1;

      if (gimple_call_return_slot_opt_p (stmt))
        {
          return_slot = modify_dest;
          modify_dest = NULL;
        }
    }
  else
    modify_dest = NULL;

  /* If we are inlining a call to the C++ operator new, we don't want
     to use type based alias analysis on the return value.  Otherwise
     we may get confused if the compiler sees that the inlined new
     function returns a pointer which was just deleted.  See bug
     33407.  */
  if (DECL_IS_OPERATOR_NEW (fn))
    {
      return_slot = NULL;
      modify_dest = NULL;
    }

  /* Declare the return variable for the function.  */
  use_retvar = declare_return_variable (id, return_slot, modify_dest);

  /* Add local vars in this inlined callee to caller.  */
  t_step = id->src_cfun->local_decls;
  for (; t_step; t_step = TREE_CHAIN (t_step))
    {
      var = TREE_VALUE (t_step);
      if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
        {
          if (var_ann (var) && add_referenced_var (var))
            cfun->local_decls = tree_cons (NULL_TREE, var,
                                           cfun->local_decls);
        }
      else if (!can_be_nonlocal (var, id))
        cfun->local_decls = tree_cons (NULL_TREE, remap_decl (var, id),
                                       cfun->local_decls);
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Inlining ");
      print_generic_expr (dump_file, id->src_fn, 0);
      fprintf (dump_file, " to ");
      print_generic_expr (dump_file, id->dst_fn, 0);
      fprintf (dump_file, " with frequency %i\n", cg_edge->frequency);
    }

  /* This is it.  Duplicate the callee body.  Assume callee is
     pre-gimplified.  Note that we must not alter the caller
     function in any way before this point, as this CALL_EXPR may be
     a self-referential call; if we're calling ourselves, we need to
     duplicate our body before altering anything.  */
  copy_body (id, bb->count,
             cg_edge->frequency * REG_BR_PROB_BASE / CGRAPH_FREQ_BASE,
             bb, return_block);

  /* Reset the escaped and callused solutions.  */
  if (cfun->gimple_df)
    {
      pt_solution_reset (&cfun->gimple_df->escaped);
      pt_solution_reset (&cfun->gimple_df->callused);
    }

  /* Clean up.  */
  if (id->debug_map)
    {
      pointer_map_destroy (id->debug_map);
      id->debug_map = dst;
    }
  pointer_map_destroy (id->decl_map);
  id->decl_map = st;

  /* Unlink the call's virtual operands before replacing it.  */
  unlink_stmt_vdef (stmt);

  /* If the inlined function returns a result that we care about,
     substitute the GIMPLE_CALL with an assignment of the return
     variable to the LHS of the call.  That is, if STMT was
     'a = foo (...)', substitute the call with 'a = USE_RETVAR'.  */
  if (use_retvar && gimple_call_lhs (stmt))
    {
      gimple old_stmt = stmt;
      stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
      gsi_replace (&stmt_gsi, stmt, false);
      if (gimple_in_ssa_p (cfun))
        mark_symbols_for_renaming (stmt);
      maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
    }
  else
    {
      /* Handle the case of inlining a function with no return
         statement, which causes the return value to become undefined.  */
      if (gimple_call_lhs (stmt)
          && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
        {
          tree name = gimple_call_lhs (stmt);
          tree var = SSA_NAME_VAR (name);
          tree def = gimple_default_def (cfun, var);

          if (def)
            {
              /* If the variable is used undefined, make this name
                 undefined via a move.  */
              stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
              gsi_replace (&stmt_gsi, stmt, true);
            }
          else
            {
              /* Otherwise make this variable undefined.  */
              gsi_remove (&stmt_gsi, true);
              set_default_def (var, name);
              SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
            }
        }
      else
        gsi_remove (&stmt_gsi, true);
    }

  if (purge_dead_abnormal_edges)
    gimple_purge_dead_abnormal_call_edges (return_block);

  /* If the value of the new expression is ignored, that's OK.  We
     don't warn about this for CALL_EXPRs, so we shouldn't warn about
     the equivalent inlined version either.  */
  if (is_gimple_assign (stmt))
    {
      gcc_assert (gimple_assign_single_p (stmt)
                  || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
      TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
    }

  /* Output the inlining info for this abstract function, since it has been
     inlined.  If we don't do this now, we can lose the information about the
     variables in the function when the blocks get blown away as soon as we
     remove the cgraph node.  */
  (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);

  /* Update callgraph if needed.  */
  cgraph_remove_node (cg_edge->callee);

  id->block = NULL_TREE;
  successfully_inlined = TRUE;

 egress:
  input_location = saved_location;
  return successfully_inlined;
}

/* Expand call statements reachable from STMT_P.
   We can only have CALL_EXPRs as the "toplevel" tree code or nested
   in a MODIFY_EXPR.  See tree-gimple.c:get_call_expr_in().  We
   unfortunately cannot use that function here because we need a
   pointer to the CALL_EXPR, not the tree itself.  */

static bool
gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);

      if (is_gimple_call (stmt)
          && expand_call_inline (bb, stmt, id))
        return true;
    }

  return false;
}


/* Walk all basic blocks created after FIRST and try to fold every statement
   in the STATEMENTS pointer set.  */

static void
fold_marked_statements (int first, struct pointer_set_t *statements)
{
  for (; first < n_basic_blocks; first++)
    if (BASIC_BLOCK (first))
      {
        gimple_stmt_iterator gsi;

        for (gsi = gsi_start_bb (BASIC_BLOCK (first));
             !gsi_end_p (gsi);
             gsi_next (&gsi))
          if (pointer_set_contains (statements, gsi_stmt (gsi)))
            {
              gimple old_stmt = gsi_stmt (gsi);
              tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;

              if (old_decl && DECL_BUILT_IN (old_decl))
                {
                  /* Folding builtins can create multiple instructions;
                     we need to look at all of them.  */
                  gimple_stmt_iterator i2 = gsi;
                  gsi_prev (&i2);
                  if (fold_stmt (&gsi))
                    {
                      gimple new_stmt;
                      if (gsi_end_p (i2))
                        i2 = gsi_start_bb (BASIC_BLOCK (first));
                      else
                        gsi_next (&i2);
                      while (1)
                        {
                          new_stmt = gsi_stmt (i2);
                          update_stmt (new_stmt);
                          cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
                                                             new_stmt);

                          if (new_stmt == gsi_stmt (gsi))
                            {
                              /* It is okay to check only for the very last
                                 of these statements.  If it is a throwing
                                 statement nothing will change.  If it isn't,
                                 this can remove EH edges.  The only way that
                                 would be wrong is if some intermediate
                                 statement could throw but not the last one;
                                 that would mean we'd have to split the
                                 block, which we can't do here and we'd lose
                                 anyway.  And as builtins probably never
                                 throw, this all is moot anyway.  */
                              if (maybe_clean_or_replace_eh_stmt (old_stmt,
                                                                  new_stmt))
                                gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
                              break;
                            }
                          gsi_next (&i2);
                        }
                    }
                }
              else if (fold_stmt (&gsi))
                {
                  /* Re-read the statement from GSI as fold_stmt() may
                     have changed it.  */
                  gimple new_stmt = gsi_stmt (gsi);
                  update_stmt (new_stmt);

                  if (is_gimple_call (old_stmt)
                      || is_gimple_call (new_stmt))
                    cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
                                                       new_stmt);

                  if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
                    gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
                }
            }
      }
}

/* Return true if BB has at least one abnormal outgoing edge.  */

static inline bool
has_abnormal_outgoing_edge_p (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (e->flags & EDGE_ABNORMAL)
      return true;

  return false;
}

/* Expand calls to inline functions in the body of FN.  */

unsigned int
optimize_inline_calls (tree fn)
{
  copy_body_data id;
  basic_block bb;
  int last = n_basic_blocks;
  struct gimplify_ctx gctx;

  /* There is no point in performing inlining if errors have already
     occurred -- and we might crash if we try to inline invalid
     code.  */
  if (errorcount || sorrycount)
    return 0;

  /* Clear out ID.  */
  memset (&id, 0, sizeof (id));

  id.src_node = id.dst_node = cgraph_node (fn);
  id.dst_fn = fn;
  /* Or any functions that aren't finished yet.  */
  if (current_function_decl)
    id.dst_fn = current_function_decl;

  id.copy_decl = copy_decl_maybe_to_var;
  id.transform_call_graph_edges = CB_CGE_DUPLICATE;
  id.transform_new_cfg = false;
  id.transform_return_to_modify = true;
  id.transform_lang_insert_block = NULL;
  id.statements_to_fold = pointer_set_create ();

  push_gimplify_context (&gctx);

  /* We make no attempts to keep dominance info up-to-date.  */
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  /* Reach the trees by walking over the CFG, and note the
     enclosing basic-blocks in the call edges.  */
  /* We walk the blocks going forward, because inlined function bodies
     will split id->current_basic_block, and the new blocks will
     follow it; we'll trudge through them, processing their CALL_EXPRs
     along the way.  */
  FOR_EACH_BB (bb)
    gimple_expand_calls_inline (bb, &id);

  pop_gimplify_context (NULL);

#ifdef ENABLE_CHECKING
    {
      struct cgraph_edge *e;

      verify_cgraph_node (id.dst_node);

      /* Double check that we inlined everything we are supposed to inline.  */
      for (e = id.dst_node->callees; e; e = e->next_callee)
        gcc_assert (e->inline_failed);
    }
#endif

  /* Fold the statements before compacting/renumbering the basic blocks.  */
  fold_marked_statements (last, id.statements_to_fold);
  pointer_set_destroy (id.statements_to_fold);

  gcc_assert (!id.debug_stmts);

  /* Renumber the (code) basic_blocks consecutively.  */
  compact_blocks ();
  /* Renumber the lexical scoping (non-code) blocks consecutively.  */
  number_blocks (fn);

  fold_cond_expr_cond ();
  delete_unreachable_blocks_update_callgraph (&id);
#ifdef ENABLE_CHECKING
  verify_cgraph_node (id.dst_node);
#endif

  /* It would be nice to check SSA/CFG/statement consistency here, but it
     is not possible yet -- the IPA passes might make various functions
     not throw, and they don't care to proactively update local EH info.
     This is done later in the fixup_cfg pass, which also executes the
     verification.  */
  return (TODO_update_ssa
          | TODO_cleanup_cfg
          | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
          | (profile_status != PROFILE_ABSENT ? TODO_rebuild_frequencies : 0));
}

/* Passed to walk_tree.  Copies the node pointed to, if appropriate.  */

tree
copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  enum tree_code code = TREE_CODE (*tp);
  enum tree_code_class cl = TREE_CODE_CLASS (code);

  /* We make copies of most nodes.  */
  if (IS_EXPR_CODE_CLASS (cl)
      || code == TREE_LIST
      || code == TREE_VEC
      || code == TYPE_DECL
      || code == OMP_CLAUSE)
    {
      /* Because the chain gets clobbered when we make a copy, we save it
         here.  */
      tree chain = NULL_TREE, new_tree;

      chain = TREE_CHAIN (*tp);

      /* Copy the node.  */
      new_tree = copy_node (*tp);

      /* Propagate mudflap marked-ness.  */
      if (flag_mudflap && mf_marked_p (*tp))
        mf_mark (new_tree);

      *tp = new_tree;

      /* Now, restore the chain, if appropriate.  That will cause
         walk_tree to walk into the chain as well.  */
      if (code == PARM_DECL
          || code == TREE_LIST
          || code == OMP_CLAUSE)
        TREE_CHAIN (*tp) = chain;

      /* For now, we don't update BLOCKs when we make copies.  So, we
         have to nullify all BIND_EXPRs.  */
      if (TREE_CODE (*tp) == BIND_EXPR)
        BIND_EXPR_BLOCK (*tp) = NULL_TREE;
    }
  else if (code == CONSTRUCTOR)
    {
      /* CONSTRUCTOR nodes need special handling because
         we need to duplicate the vector of elements.  */
      tree new_tree;

      new_tree = copy_node (*tp);

      /* Propagate mudflap marked-ness.  */
      if (flag_mudflap && mf_marked_p (*tp))
        mf_mark (new_tree);

      CONSTRUCTOR_ELTS (new_tree) = VEC_copy (constructor_elt, gc,
                                              CONSTRUCTOR_ELTS (*tp));
      *tp = new_tree;
    }
  else if (TREE_CODE_CLASS (code) == tcc_type)
    *walk_subtrees = 0;
  else if (TREE_CODE_CLASS (code) == tcc_declaration)
    *walk_subtrees = 0;
  else if (TREE_CODE_CLASS (code) == tcc_constant)
    *walk_subtrees = 0;
  else
    gcc_assert (code != STATEMENT_LIST);
  return NULL_TREE;
}

/* The SAVE_EXPR pointed to by TP is being copied.  If ST contains
   information indicating to what new SAVE_EXPR this one should be mapped,
   use that one.  Otherwise, create a new node and enter it in ST.  FN is
   the function into which the copy will be placed.  */

static void
remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
{
  struct pointer_map_t *st = (struct pointer_map_t *) st_;
  tree *n;
  tree t;

  /* See if we already encountered this SAVE_EXPR.  */
  n = (tree *) pointer_map_contains (st, *tp);

  /* If we didn't already remap this SAVE_EXPR, do so now.  */
  if (!n)
    {
      t = copy_node (*tp);

      /* Remember this SAVE_EXPR.  */
      *pointer_map_insert (st, *tp) = t;
      /* Make sure we don't remap an already-remapped SAVE_EXPR.  */
      *pointer_map_insert (st, t) = t;
    }
  else
    {
      /* We've already walked into this SAVE_EXPR; don't do it again.  */
      *walk_subtrees = 0;
      t = *n;
    }

  /* Replace this SAVE_EXPR with the copy.  */
  *tp = t;
}
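
/* For illustration: the double insertion above means that after mapping
   SAVE_EXPR S to its copy T, the table contains both S -> T and T -> T,
   so if a later walk reaches T itself (e.g. through a shared subtree)
   it is "remapped" to itself instead of being copied a second time.  */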

/* Called via walk_tree.  If *TP points to a DECL_STMT for a local label,
   copies the declaration and enters it in the splay_tree in DATA (which is
   really a `copy_body_data *').  */

static tree
mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
                        void *data)
{
  copy_body_data *id = (copy_body_data *) data;

  /* Don't walk into types.  */
  if (TYPE_P (*tp))
    *walk_subtrees = 0;

  else if (TREE_CODE (*tp) == LABEL_EXPR)
    {
      tree decl = TREE_OPERAND (*tp, 0);

      /* Copy the decl and remember the copy.  */
      insert_decl_map (id, decl, id->copy_decl (decl, id));
    }

  return NULL_TREE;
}

/* Perform any modifications to EXPR required when it is unsaved.  Does
   not recurse into EXPR's subtrees.  */

static void
unsave_expr_1 (tree expr)
{
  switch (TREE_CODE (expr))
    {
    case TARGET_EXPR:
      /* Don't mess with a TARGET_EXPR that hasn't been expanded.
         It's OK for this to happen if it was part of a subtree that
         isn't immediately expanded, such as operand 2 of another
         TARGET_EXPR.  */
      if (TREE_OPERAND (expr, 1))
        break;

      TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
      TREE_OPERAND (expr, 3) = NULL_TREE;
      break;

    default:
      break;
    }
}

/* Called via walk_tree when an expression is unsaved.  Using the
   pointer map in ID's decl_map, remaps all local declarations to
   appropriate replacements.  */

static tree
unsave_r (tree *tp, int *walk_subtrees, void *data)
{
  copy_body_data *id = (copy_body_data *) data;
  struct pointer_map_t *st = id->decl_map;
  tree *n;

  /* Only a local declaration (variable or label).  */
  if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
      || TREE_CODE (*tp) == LABEL_DECL)
    {
      /* Lookup the declaration.  */
      n = (tree *) pointer_map_contains (st, *tp);

      /* If it's there, remap it.  */
      if (n)
        *tp = *n;
    }

  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  else if (TREE_CODE (*tp) == SAVE_EXPR
           || TREE_CODE (*tp) == TARGET_EXPR)
    remap_save_expr (tp, st, walk_subtrees);
  else
    {
      copy_tree_r (tp, walk_subtrees, NULL);

      /* Do whatever unsaving is required.  */
      unsave_expr_1 (*tp);
    }

  /* Keep iterating.  */
  return NULL_TREE;
}

/* Copies everything in EXPR and replaces variables, labels
   and SAVE_EXPRs local to EXPR.  */

tree
unsave_expr_now (tree expr)
{
  copy_body_data id;

  /* There's nothing to do for NULL_TREE.  */
  if (expr == 0)
    return expr;

  /* Set up ID.  */
  memset (&id, 0, sizeof (id));
  id.src_fn = current_function_decl;
  id.dst_fn = current_function_decl;
  id.decl_map = pointer_map_create ();
  id.debug_map = NULL;

  id.copy_decl = copy_decl_no_change;
  id.transform_call_graph_edges = CB_CGE_DUPLICATE;
  id.transform_new_cfg = false;
  id.transform_return_to_modify = false;
  id.transform_lang_insert_block = NULL;

  /* Walk the tree once to find local labels.  */
  walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);

  /* Walk the tree again, copying, remapping, and unsaving.  */
  walk_tree (&expr, unsave_r, &id, NULL);

  /* Clean up.  */
  pointer_map_destroy (id.decl_map);
  if (id.debug_map)
    pointer_map_destroy (id.debug_map);

  return expr;
}
4351
 
4352
/* Called via walk_gimple_seq.  If *GSIP points to a GIMPLE_LABEL for a local
4353
   label, copies the declaration and enters it in the splay_tree in DATA (which
4354
   is really a 'copy_body_data *'.  */
4355
 
4356
static tree
4357
mark_local_labels_stmt (gimple_stmt_iterator *gsip,
4358
                        bool *handled_ops_p ATTRIBUTE_UNUSED,
4359
                        struct walk_stmt_info *wi)
4360
{
4361
  copy_body_data *id = (copy_body_data *) wi->info;
4362
  gimple stmt = gsi_stmt (*gsip);
4363
 
4364
  if (gimple_code (stmt) == GIMPLE_LABEL)
4365
    {
4366
      tree decl = gimple_label_label (stmt);
4367
 
4368
      /* Copy the decl and remember the copy.  */
4369
      insert_decl_map (id, decl, id->copy_decl (decl, id));
4370
    }
4371
 
4372
  return NULL_TREE;
4373
}
4374
 
4375
 
4376
/* Called via walk_gimple_seq by copy_gimple_seq_and_replace_local.
4377
   Using the splay_tree pointed to by ST (which is really a `splay_tree'),
4378
   remaps all local declarations to appropriate replacements in gimple
4379
   operands. */
4380
 
4381
static tree
4382
replace_locals_op (tree *tp, int *walk_subtrees, void *data)
4383
{
4384
  struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
4385
  copy_body_data *id = (copy_body_data *) wi->info;
4386
  struct pointer_map_t *st = id->decl_map;
4387
  tree *n;
4388
  tree expr = *tp;
4389
 
4390
  /* Only a local declaration (variable or label).  */
4391
  if ((TREE_CODE (expr) == VAR_DECL
4392
       && !TREE_STATIC (expr))
4393
      || TREE_CODE (expr) == LABEL_DECL)
4394
    {
4395
      /* Lookup the declaration.  */
4396
      n = (tree *) pointer_map_contains (st, expr);
4397
 
4398
      /* If it's there, remap it.  */
4399
      if (n)
4400
        *tp = *n;
4401
      *walk_subtrees = 0;
4402
    }
4403
  else if (TREE_CODE (expr) == STATEMENT_LIST
4404
           || TREE_CODE (expr) == BIND_EXPR
4405
           || TREE_CODE (expr) == SAVE_EXPR)
4406
    gcc_unreachable ();
4407
  else if (TREE_CODE (expr) == TARGET_EXPR)
4408
    {
4409
      /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4410
         It's OK for this to happen if it was part of a subtree that
4411
         isn't immediately expanded, such as operand 2 of another
4412
         TARGET_EXPR.  */
4413
      if (!TREE_OPERAND (expr, 1))
4414
        {
4415
          TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4416
          TREE_OPERAND (expr, 3) = NULL_TREE;
4417
        }
4418
    }
4419
 
4420
  /* Keep iterating.  */
4421
  return NULL_TREE;
4422
}


/* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
   Using the decl_map pointer map in the copy_body_data passed through
   WI->info, remaps all local declarations to appropriate replacements
   in gimple statements.  */

static tree
replace_locals_stmt (gimple_stmt_iterator *gsip,
                     bool *handled_ops_p ATTRIBUTE_UNUSED,
                     struct walk_stmt_info *wi)
{
  copy_body_data *id = (copy_body_data *) wi->info;
  gimple stmt = gsi_stmt (*gsip);

  if (gimple_code (stmt) == GIMPLE_BIND)
    {
      tree block = gimple_bind_block (stmt);

      if (block)
        {
          remap_block (&block, id);
          gimple_bind_set_block (stmt, block);
        }

      /* This will remap a lot of the same decls again, but this should be
         harmless.  */
      if (gimple_bind_vars (stmt))
        gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt), NULL, id));
    }

  /* Keep iterating.  */
  return NULL_TREE;
}


/* Copies everything in SEQ and replaces variables and labels local to
   current_function_decl.  */

gimple_seq
copy_gimple_seq_and_replace_locals (gimple_seq seq)
{
  copy_body_data id;
  struct walk_stmt_info wi;
  struct pointer_set_t *visited;
  gimple_seq copy;

  /* There's nothing to do for an empty sequence.  */
  if (seq == NULL)
    return seq;

  /* Set up ID.  */
  memset (&id, 0, sizeof (id));
  id.src_fn = current_function_decl;
  id.dst_fn = current_function_decl;
  id.decl_map = pointer_map_create ();
  id.debug_map = NULL;

  id.copy_decl = copy_decl_no_change;
  id.transform_call_graph_edges = CB_CGE_DUPLICATE;
  id.transform_new_cfg = false;
  id.transform_return_to_modify = false;
  id.transform_lang_insert_block = NULL;

  /* Walk the tree once to find local labels.  */
  memset (&wi, 0, sizeof (wi));
  visited = pointer_set_create ();
  wi.info = &id;
  wi.pset = visited;
  walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
  pointer_set_destroy (visited);

  copy = gimple_seq_copy (seq);

  /* Walk the copy, remapping decls.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = &id;
  walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);

  /* Clean up.  */
  pointer_map_destroy (id.decl_map);
  if (id.debug_map)
    pointer_map_destroy (id.debug_map);

  return copy;
}
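
/* A hedged usage sketch (not part of the original file; the helper below
   is hypothetical): duplicating the body of a GIMPLE_BIND so the copy
   can be edited without clobbering the original's locals or labels.  */
#if 0
static gimple_seq
duplicate_bind_body_for_editing (gimple bind_stmt)
{
  /* Every local VAR_DECL and LABEL_DECL in the result is a fresh copy,
     so edits to the returned sequence leave the original body intact.  */
  return copy_gimple_seq_and_replace_locals (gimple_bind_body (bind_stmt));
}
#endif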


/* Allow someone to determine if SEARCH is a child of TOP from gdb.  */

static tree
debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
{
  if (*tp == data)
    return (tree) data;
  else
    return NULL;
}

bool
debug_find_tree (tree top, tree search)
{
  return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
}
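
/* Example gdb usage (session text is illustrative; a nonzero result
   means SEARCH occurs somewhere under TOP):
     (gdb) call debug_find_tree (current_function_decl, suspect_decl)
     $1 = 1  */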


/* Declare the variables created by the inliner.  Add all the variables in
   VARS to BLOCK.  */

static void
declare_inline_vars (tree block, tree vars)
{
  tree t;
  for (t = vars; t; t = TREE_CHAIN (t))
    {
      DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
      gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
      cfun->local_decls = tree_cons (NULL_TREE, t, cfun->local_decls);
    }

  if (block)
    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
}

/* Finish up the bookkeeping for a copied declaration.  DECL originally was
   in ID->src_fn, and its copy COPY will live in ID->dst_fn.  Sets debug
   flags, the abstract origin, RTL, and the context of COPY.  */

static tree
copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
{
  /* Don't generate debug information for the copy if we wouldn't have
     generated it for the original either.  */
  DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
  DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);

  /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
     declaration inspired this copy.  */
  DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);

  /* The new variable/label has no RTL, yet.  */
  if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
      && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
    SET_DECL_RTL (copy, NULL_RTX);

  /* These args would always appear unused, if not for this.  */
  TREE_USED (copy) = 1;

  /* Set the context for the new declaration.  */
  if (!DECL_CONTEXT (decl))
    /* Globals stay global.  */
    ;
  else if (DECL_CONTEXT (decl) != id->src_fn)
    /* Things that weren't in the scope of the function we're inlining
       from aren't in the scope we're inlining to, either.  */
    ;
  else if (TREE_STATIC (decl))
    /* Function-scoped static variables should stay in the original
       function.  */
    ;
  else
    /* Ordinary automatic local variables are now in the scope of the
       new function.  */
    DECL_CONTEXT (copy) = id->dst_fn;

  return copy;
}

static tree
copy_decl_to_var (tree decl, copy_body_data *id)
{
  tree copy, type;

  gcc_assert (TREE_CODE (decl) == PARM_DECL
              || TREE_CODE (decl) == RESULT_DECL);

  type = TREE_TYPE (decl);

  copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
                     VAR_DECL, DECL_NAME (decl), type);
  TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
  TREE_READONLY (copy) = TREE_READONLY (decl);
  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
  DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);

  return copy_decl_for_dup_finish (id, decl, copy);
}

/* Like copy_decl_to_var, but create a return slot object instead of a
   pointer variable for return by invisible reference.  */

static tree
copy_result_decl_to_var (tree decl, copy_body_data *id)
{
  tree copy, type;

  gcc_assert (TREE_CODE (decl) == PARM_DECL
              || TREE_CODE (decl) == RESULT_DECL);

  type = TREE_TYPE (decl);
  if (DECL_BY_REFERENCE (decl))
    type = TREE_TYPE (type);

  copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
                     VAR_DECL, DECL_NAME (decl), type);
  TREE_READONLY (copy) = TREE_READONLY (decl);
  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
  if (!DECL_BY_REFERENCE (decl))
    {
      TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
      DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
    }

  return copy_decl_for_dup_finish (id, decl, copy);
}

tree
copy_decl_no_change (tree decl, copy_body_data *id)
{
  tree copy;

  copy = copy_node (decl);

  /* The COPY is not abstract; it will be generated in DST_FN.  */
  DECL_ABSTRACT (copy) = 0;
  lang_hooks.dup_lang_specific_decl (copy);

  /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
     been taken; it's for internal bookkeeping in expand_goto_internal.  */
  if (TREE_CODE (copy) == LABEL_DECL)
    {
      TREE_ADDRESSABLE (copy) = 0;
      LABEL_DECL_UID (copy) = -1;
    }

  return copy_decl_for_dup_finish (id, decl, copy);
}

static tree
copy_decl_maybe_to_var (tree decl, copy_body_data *id)
{
  if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
    return copy_decl_to_var (decl, id);
  else
    return copy_decl_no_change (decl, id);
}

/* Return a copy of the function's argument tree.  */
static tree
copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
                               bitmap args_to_skip, tree *vars)
{
  tree arg, *parg;
  tree new_parm = NULL;
  int i = 0;

  parg = &new_parm;

  for (arg = orig_parm; arg; arg = TREE_CHAIN (arg), i++)
    if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
      {
        tree new_tree = remap_decl (arg, id);
        lang_hooks.dup_lang_specific_decl (new_tree);
        *parg = new_tree;
        parg = &TREE_CHAIN (new_tree);
      }
    else if (!pointer_map_contains (id->decl_map, arg))
      {
        /* Make an equivalent VAR_DECL.  If the argument was used as a
           temporary variable later in the function, the uses will be
           replaced by the local variable.  */
        tree var = copy_decl_to_var (arg, id);
        get_var_ann (var);
        add_referenced_var (var);
        insert_decl_map (id, arg, var);
        /* Declare this new variable.  */
        TREE_CHAIN (var) = *vars;
        *vars = var;
      }
  return new_parm;
}

/* Return a copy of the function's static chain.  */
static tree
copy_static_chain (tree static_chain, copy_body_data * id)
{
  tree *chain_copy, *pvar;

  chain_copy = &static_chain;
  for (pvar = chain_copy; *pvar; pvar = &TREE_CHAIN (*pvar))
    {
      tree new_tree = remap_decl (*pvar, id);
      lang_hooks.dup_lang_specific_decl (new_tree);
      TREE_CHAIN (new_tree) = TREE_CHAIN (*pvar);
      *pvar = new_tree;
    }
  return static_chain;
}
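
/* Context (a hedged illustration, not part of the original file): a
   static chain arises when a GNU C nested function captures a local of
   its enclosing function, so versioning `outer' must also remap the
   chain decl.  */
#if 0
static int
outer (int n)
{
  /* `inner' reads `n' from the enclosing frame via the static chain.  */
  int inner (int i) { return i + n; }
  return inner (1);
}
#endif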

/* Return true if the function is allowed to be versioned.
   This is a guard for the versioning functionality.  */

bool
tree_versionable_function_p (tree fndecl)
{
  return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
          && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl), fndecl) == NULL);
}
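
/* Usage note (hedged; the declaration below is illustrative): a function
   can opt out of cloning/versioning with the `noclone' attribute, which
   makes tree_versionable_function_p return false for it.  */
#if 0
extern void stable_entry_point (void) __attribute__ ((noclone));
#endif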

/* Delete all unreachable basic blocks and update the callgraph.
   Doing so is somewhat nontrivial because we need to update all clones and
   remove inline functions that become unreachable.  */

static bool
delete_unreachable_blocks_update_callgraph (copy_body_data *id)
{
  bool changed = false;
  basic_block b, next_bb;

  find_unreachable_blocks ();

  /* Delete all unreachable basic blocks.  */

  for (b = ENTRY_BLOCK_PTR->next_bb; b != EXIT_BLOCK_PTR; b = next_bb)
    {
      next_bb = b->next_bb;

      if (!(b->flags & BB_REACHABLE))
        {
          gimple_stmt_iterator bsi;

          for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
            if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL)
              {
                struct cgraph_edge *e;
                struct cgraph_node *node;

                if ((e = cgraph_edge (id->dst_node, gsi_stmt (bsi))) != NULL)
                  {
                    if (!e->inline_failed)
                      cgraph_remove_node_and_inline_clones (e->callee);
                    else
                      cgraph_remove_edge (e);
                  }
                if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
                    && id->dst_node->clones)
                  for (node = id->dst_node->clones; node != id->dst_node;)
                    {
                      if ((e = cgraph_edge (node, gsi_stmt (bsi))) != NULL)
                        {
                          if (!e->inline_failed)
                            cgraph_remove_node_and_inline_clones (e->callee);
                          else
                            cgraph_remove_edge (e);
                        }

                      if (node->clones)
                        node = node->clones;
                      else if (node->next_sibling_clone)
                        node = node->next_sibling_clone;
                      else
                        {
                          while (node != id->dst_node && !node->next_sibling_clone)
                            node = node->clone_of;
                          if (node != id->dst_node)
                            node = node->next_sibling_clone;
                        }
                    }
              }
          delete_basic_block (b);
          changed = true;
        }
    }

  if (changed)
    tidy_fallthru_edges ();
  return changed;
}

/* Update clone info after duplication.  */

static void
update_clone_info (copy_body_data * id)
{
  struct cgraph_node *node;
  if (!id->dst_node->clones)
    return;
  for (node = id->dst_node->clones; node != id->dst_node;)
    {
      /* First update replace maps to match the new body.  */
      if (node->clone.tree_map)
        {
          unsigned int i;
          for (i = 0; i < VEC_length (ipa_replace_map_p, node->clone.tree_map); i++)
            {
              struct ipa_replace_map *replace_info;
              replace_info = VEC_index (ipa_replace_map_p, node->clone.tree_map, i);
              walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
              walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
            }
        }
      if (node->clones)
        node = node->clones;
      else if (node->next_sibling_clone)
        node = node->next_sibling_clone;
      else
        {
          while (node != id->dst_node && !node->next_sibling_clone)
            node = node->clone_of;
          if (node != id->dst_node)
            node = node->next_sibling_clone;
        }
    }
}
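
/* The clone walks above are preorder traversals of a first-child /
   next-sibling tree rooted at ID->dst_node: `clones' is the first child,
   `next_sibling_clone' the next sibling, and `clone_of' the parent.  A
   self-contained restatement of the pattern (generic names, not from
   this file):  */
#if 0
struct clone_like { struct clone_like *child, *sib, *up; };

static void
walk_clones_preorder (struct clone_like *root,
                      void (*visit) (struct clone_like *))
{
  struct clone_like *p = root->child;
  if (!p)
    return;
  while (p != root)
    {
      visit (p);                    /* Visit before descending.  */
      if (p->child)                 /* First try the first child...  */
        p = p->child;
      else if (p->sib)              /* ...then the next sibling...  */
        p = p->sib;
      else                          /* ...else climb until some ancestor
                                       has a sibling, or we hit the root.  */
        {
          while (p != root && !p->sib)
            p = p->up;
          if (p != root)
            p = p->sib;
        }
    }
}
#endif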

/* Create a copy of a function's tree.
   OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
   of the original function and the new copied function
   respectively.  In case we want to replace a DECL
   tree with another tree while duplicating the function's
   body, TREE_MAP represents the mapping between these
   trees.  If UPDATE_CLONES is set, the call_stmt fields
   of edges of clones of the function will be updated.  */
void
tree_function_versioning (tree old_decl, tree new_decl,
                          VEC(ipa_replace_map_p,gc)* tree_map,
                          bool update_clones, bitmap args_to_skip)
{
  struct cgraph_node *old_version_node;
  struct cgraph_node *new_version_node;
  copy_body_data id;
  tree p;
  unsigned i;
  struct ipa_replace_map *replace_info;
  basic_block old_entry_block, bb;
  VEC (gimple, heap) *init_stmts = VEC_alloc (gimple, heap, 10);

  tree t_step;
  tree old_current_function_decl = current_function_decl;
  tree vars = NULL_TREE;

  gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
              && TREE_CODE (new_decl) == FUNCTION_DECL);
  DECL_POSSIBLY_INLINED (old_decl) = 1;

  old_version_node = cgraph_node (old_decl);
  new_version_node = cgraph_node (new_decl);

  /* Output the inlining info for this abstract function, since it has been
     inlined.  If we don't do this now, we can lose the information about the
     variables in the function when the blocks get blown away as soon as we
     remove the cgraph node.  */
  (*debug_hooks->outlining_inline_function) (old_decl);

  DECL_ARTIFICIAL (new_decl) = 1;
  DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
  DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);

  /* Prepare the data structures for the tree copy.  */
  memset (&id, 0, sizeof (id));

  /* Record statements that will need folding after the copy.  */
  id.statements_to_fold = pointer_set_create ();

  id.decl_map = pointer_map_create ();
  id.debug_map = NULL;
  id.src_fn = old_decl;
  id.dst_fn = new_decl;
  id.src_node = old_version_node;
  id.dst_node = new_version_node;
  id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
  if (id.src_node->ipa_transforms_to_apply)
    {
      VEC(ipa_opt_pass,heap) * old_transforms_to_apply = id.dst_node->ipa_transforms_to_apply;
      unsigned int i;

      id.dst_node->ipa_transforms_to_apply = VEC_copy (ipa_opt_pass, heap,
                                                       id.src_node->ipa_transforms_to_apply);
      for (i = 0; i < VEC_length (ipa_opt_pass, old_transforms_to_apply); i++)
        VEC_safe_push (ipa_opt_pass, heap, id.dst_node->ipa_transforms_to_apply,
                       VEC_index (ipa_opt_pass,
                                  old_transforms_to_apply,
                                  i));
    }

  id.copy_decl = copy_decl_no_change;
  id.transform_call_graph_edges
    = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
  id.transform_new_cfg = true;
  id.transform_return_to_modify = false;
  id.transform_lang_insert_block = NULL;

  current_function_decl = new_decl;
  old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
    (DECL_STRUCT_FUNCTION (old_decl));
  initialize_cfun (new_decl, old_decl,
                   old_entry_block->count);
  push_cfun (DECL_STRUCT_FUNCTION (new_decl));

  /* Copy the function's static chain.  */
  p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
  if (p)
    DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
      copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
                         &id);

  /* If there's a tree_map, prepare for substitution.  */
  if (tree_map)
    for (i = 0; i < VEC_length (ipa_replace_map_p, tree_map); i++)
      {
        gimple init;
        replace_info = VEC_index (ipa_replace_map_p, tree_map, i);
        if (replace_info->replace_p)
          {
            tree op = replace_info->new_tree;

            STRIP_NOPS (op);

            if (TREE_CODE (op) == VIEW_CONVERT_EXPR)
              op = TREE_OPERAND (op, 0);

            if (TREE_CODE (op) == ADDR_EXPR)
              {
                op = TREE_OPERAND (op, 0);
                while (handled_component_p (op))
                  op = TREE_OPERAND (op, 0);
                if (TREE_CODE (op) == VAR_DECL)
                  add_referenced_var (op);
              }
            gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
            init = setup_one_parameter (&id, replace_info->old_tree,
                                        replace_info->new_tree, id.src_fn,
                                        NULL,
                                        &vars);
            if (init)
              VEC_safe_push (gimple, heap, init_stmts, init);
          }
      }
  /* Copy the function's arguments.  */
  if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
    DECL_ARGUMENTS (new_decl) =
      copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
                                     args_to_skip, &vars);

  DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);

  /* Renumber the lexical scoping (non-code) blocks consecutively.  */
  number_blocks (id.dst_fn);

  declare_inline_vars (DECL_INITIAL (new_decl), vars);

  if (DECL_STRUCT_FUNCTION (old_decl)->local_decls != NULL_TREE)
    /* Add local vars.  */
    for (t_step = DECL_STRUCT_FUNCTION (old_decl)->local_decls;
         t_step; t_step = TREE_CHAIN (t_step))
      {
        tree var = TREE_VALUE (t_step);
        if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
          cfun->local_decls = tree_cons (NULL_TREE, var, cfun->local_decls);
        else if (!can_be_nonlocal (var, &id))
          cfun->local_decls =
            tree_cons (NULL_TREE, remap_decl (var, &id),
                       cfun->local_decls);
      }

  /* Copy the function's body.  */
  copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
             ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR);

  if (DECL_RESULT (old_decl) != NULL_TREE)
    {
      tree *res_decl = &DECL_RESULT (old_decl);
      DECL_RESULT (new_decl) = remap_decl (*res_decl, &id);
      lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
    }

  /* Renumber the lexical scoping (non-code) blocks consecutively.  */
  number_blocks (new_decl);

  /* We want to create the BB unconditionally, so that the addition of
     debug stmts doesn't affect BB count, which may in the end cause
     codegen differences.  */
  bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));
  while (VEC_length (gimple, init_stmts))
    insert_init_stmt (&id, bb, VEC_pop (gimple, init_stmts));
  update_clone_info (&id);

  /* Remap the nonlocal_goto_save_area, if any.  */
  if (cfun->nonlocal_goto_save_area)
    {
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      wi.info = &id;
      walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
    }

  /* Clean up.  */
  pointer_map_destroy (id.decl_map);
  if (id.debug_map)
    pointer_map_destroy (id.debug_map);
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  fold_marked_statements (0, id.statements_to_fold);
  pointer_set_destroy (id.statements_to_fold);
  fold_cond_expr_cond ();
  delete_unreachable_blocks_update_callgraph (&id);
  update_ssa (TODO_update_ssa);
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  gcc_assert (!id.debug_stmts);
  VEC_free (gimple, heap, init_stmts);
  pop_cfun ();
  current_function_decl = old_current_function_decl;
  gcc_assert (!current_function_decl
              || DECL_STRUCT_FUNCTION (current_function_decl) == cfun);
  return;
}
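
/* A hedged sketch of what versioning achieves (names illustrative, not
   from this file): ipa-cp may request a clone of `f' with `flag' pinned
   to 1, passing the binding via TREE_MAP and dropping the parameter via
   ARGS_TO_SKIP.  */
#if 0
static int f (int x, int flag) { return flag ? x + 1 : x - 1; }
/* Conceptually, tree_function_versioning then materializes:  */
static int f_constprop (int x) { return x + 1; }
#endif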

/* EXP is a CALL_EXPR present in a GENERIC expression tree.  Try to integrate
   the callee and return the inlined body on success.  */

tree
maybe_inline_call_in_expr (tree exp)
{
  tree fn = get_callee_fndecl (exp);

  /* We can only try to inline "const" functions.  */
  if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
    {
      struct pointer_map_t *decl_map = pointer_map_create ();
      call_expr_arg_iterator iter;
      copy_body_data id;
      tree param, arg, t;

      /* Remap the parameters.  */
      for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
           param;
           param = TREE_CHAIN (param), arg = next_call_expr_arg (&iter))
        *pointer_map_insert (decl_map, param) = arg;

      memset (&id, 0, sizeof (id));
      id.src_fn = fn;
      id.dst_fn = current_function_decl;
      id.src_cfun = DECL_STRUCT_FUNCTION (fn);
      id.decl_map = decl_map;

      id.copy_decl = copy_decl_no_change;
      id.transform_call_graph_edges = CB_CGE_DUPLICATE;
      id.transform_new_cfg = false;
      id.transform_return_to_modify = true;
      id.transform_lang_insert_block = NULL;

      /* Make sure not to unshare trees behind the front-end's back
         since front-end specific mechanisms may rely on sharing.  */
      id.regimplify = false;
      id.do_not_unshare = true;

      /* We're not inside any EH region.  */
      id.eh_lp_nr = 0;

      t = copy_tree_body (&id);
      pointer_map_destroy (decl_map);

      /* We can only return something suitable for use in a GENERIC
         expression tree.  */
      if (TREE_CODE (t) == MODIFY_EXPR)
        return TREE_OPERAND (t, 1);
    }

  return NULL_TREE;
}
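
/* A hedged illustration (the function below is hypothetical): a call
   such as `sq (3)' may be integrated at the GENERIC level because `sq'
   is const and, provided the front end kept its saved body, the call
   can be replaced by the expression `3 * 3'.  */
#if 0
static int sq (int x) __attribute__ ((const));
static int sq (int x) { return x * x; }
#endif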

/* Duplicate a type, fields and all.  */

tree
build_duplicate_type (tree type)
{
  struct copy_body_data id;

  memset (&id, 0, sizeof (id));
  id.src_fn = current_function_decl;
  id.dst_fn = current_function_decl;
  id.src_cfun = cfun;
  id.decl_map = pointer_map_create ();
  id.debug_map = NULL;
  id.copy_decl = copy_decl_no_change;

  type = remap_type_1 (type, &id);

  pointer_map_destroy (id.decl_map);
  if (id.debug_map)
    pointer_map_destroy (id.debug_map);

  TYPE_CANONICAL (type) = type;

  return type;
}
5123
 
5124
/* Return whether it is safe to inline a function because it used different
5125
   target specific options or call site actual types mismatch parameter types.
5126
   E is the call edge to be checked.  */
5127
bool
5128
tree_can_inline_p (struct cgraph_edge *e)
5129
{
5130
#if 0
5131
  /* This causes a regression in SPEC in that it prevents a cold function from
5132
     inlining a hot function.  Perhaps this should only apply to functions
5133
     that the user declares hot/cold/optimize explicitly.  */
5134
 
5135
  /* Don't inline a function with a higher optimization level than the
5136
     caller, or with different space constraints (hot/cold functions).  */
5137
  tree caller_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (caller);
5138
  tree callee_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (callee);
5139
 
5140
  if (caller_tree != callee_tree)
5141
    {
5142
      struct cl_optimization *caller_opt
5143
        = TREE_OPTIMIZATION ((caller_tree)
5144
                             ? caller_tree
5145
                             : optimization_default_node);
5146
 
5147
      struct cl_optimization *callee_opt
5148
        = TREE_OPTIMIZATION ((callee_tree)
5149
                             ? callee_tree
5150
                             : optimization_default_node);
5151
 
5152
      if ((caller_opt->optimize > callee_opt->optimize)
5153
          || (caller_opt->optimize_size != callee_opt->optimize_size))
5154
        return false;
5155
    }
5156
#endif
5157
  tree caller, callee, lhs;
5158
 
5159
  caller = e->caller->decl;
5160
  callee = e->callee->decl;
5161
 
5162
  /* We cannot inline a function that uses a different EH personality
5163
     than the caller.  */
5164
  if (DECL_FUNCTION_PERSONALITY (caller)
5165
      && DECL_FUNCTION_PERSONALITY (callee)
5166
      && (DECL_FUNCTION_PERSONALITY (caller)
5167
          != DECL_FUNCTION_PERSONALITY (callee)))
5168
    {
5169
      e->inline_failed = CIF_UNSPECIFIED;
5170
      gimple_call_set_cannot_inline (e->call_stmt, true);
5171
      return false;
5172
    }
5173
 
5174
  /* Allow the backend to decide if inlining is ok.  */
5175
  if (!targetm.target_option.can_inline_p (caller, callee))
5176
    {
5177
      e->inline_failed = CIF_TARGET_OPTION_MISMATCH;
5178
      gimple_call_set_cannot_inline (e->call_stmt, true);
5179
      e->call_stmt_cannot_inline_p = true;
5180
      return false;
5181
    }
5182
 
5183
  /* Do not inline calls where we cannot triviall work around mismatches
5184
     in argument or return types.  */
5185
  if (e->call_stmt
5186
      && ((DECL_RESULT (callee)
5187
           && !DECL_BY_REFERENCE (DECL_RESULT (callee))
5188
           && (lhs = gimple_call_lhs (e->call_stmt)) != NULL_TREE
5189
           && !useless_type_conversion_p (TREE_TYPE (DECL_RESULT (callee)),
5190
                                          TREE_TYPE (lhs))
5191
           && !fold_convertible_p (TREE_TYPE (DECL_RESULT (callee)), lhs))
5192
          || !gimple_check_call_args (e->call_stmt)))
5193
    {
5194
      e->inline_failed = CIF_MISMATCHED_ARGUMENTS;
5195
      gimple_call_set_cannot_inline (e->call_stmt, true);
5196
      e->call_stmt_cannot_inline_p = true;
5197
      return false;
5198
    }
5199
 
5200
  return true;
5201
}
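
/* A hedged illustration of the backend check above (names hypothetical):
   a callee whose target attribute demands ISA features the caller lacks
   is rejected with CIF_TARGET_OPTION_MISMATCH.  */
#if 0
static int fast_path (int x) __attribute__ ((target ("sse4.2")));
static int fast_path (int x) { return x + 1; }

int
caller (int x)
{
  /* Unless `caller' is itself compiled with -msse4.2,
     targetm.target_option.can_inline_p refuses to inline here.  */
  return fast_path (x);
}
#endif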
