/* Tree inlining.
   Copyright 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
   Contributed by Alexandre Oliva <aoliva@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "tree.h"
#include "tree-inline.h"
#include "rtl.h"
#include "expr.h"
#include "flags.h"
#include "params.h"
#include "input.h"
#include "insn-config.h"
#include "varray.h"
#include "hashtab.h"
#include "splay-tree.h"
#include "langhooks.h"
#include "basic-block.h"
#include "tree-iterator.h"
#include "cgraph.h"
#include "intl.h"
#include "tree-mudflap.h"
#include "tree-flow.h"
#include "function.h"
#include "ggc.h"
#include "diagnostic.h"
#include "except.h"
#include "debug.h"
#include "pointer-set.h"
#include "ipa-prop.h"

/* I'm not real happy about this, but we need to handle gimple and
   non-gimple trees.  */
#include "tree-gimple.h"

/* Inlining, Saving, Cloning

   Inlining: a function body is duplicated, but the PARM_DECLs are
   remapped into VAR_DECLs, and non-void RETURN_EXPRs become
   MODIFY_EXPRs that store to a dedicated returned-value variable.
   The duplicated eh_region info of the copy will later be appended
   to the info for the caller; the eh_region info in copied throwing
   statements and RESX_EXPRs is adjusted accordingly.

   Saving: make a semantically-identical copy of the function body.
   Necessary when we want to generate code for the body (a destructive
   operation), but we expect to need this body in the future (e.g. for
   inlining into another function).

   Cloning: (only in C++) We have one body for a con/de/structor, and
   multiple function decls, each with a unique parameter list.
   Duplicate the body, using the given splay tree; some parameters
   will become constants (like 0 or 1).

   All of these will simultaneously look up any callgraph edges.  If
   we're going to inline the duplicated function body, and the given
   function has some cloned callgraph nodes (one for each place this
   function will be inlined) those callgraph edges will be duplicated.
   If we're saving or cloning the body, those callgraph edges will be
   updated to point into the new body.  (Note that the original
   callgraph node and edge list will not be altered.)

   See the CALL_EXPR handling case in copy_body_r ().  */

/* 0 if we should not perform inlining.
   1 if we should expand function calls inline at the tree level.
   2 if we should consider *all* functions to be inline
   candidates.  */

int flag_inline_trees = 0;

/* To Do:

   o In order to make inlining-on-trees work, we pessimized
     function-local static constants.  In particular, they are now
     always output, even when not addressed.  Fix this by treating
     function-local static constants just like global static
     constants; the back-end already knows not to output them if they
     are not needed.

   o Provide heuristics to clamp inlining of recursive template
     calls?  */

/* Data required for function inlining.  */

typedef struct inline_data
{
  /* FUNCTION_DECL for function being inlined.  */
  tree callee;
  /* FUNCTION_DECL for function being inlined into.  */
  tree caller;
  /* struct function for function being inlined.  Usually this is the same
     as DECL_STRUCT_FUNCTION (callee), but can be different if saved_cfg
     and saved_eh are in use.  */
  struct function *callee_cfun;
  /* The VAR_DECL for the return value.  */
  tree retvar;
  /* The map from local declarations in the inlined function to
     equivalents in the function into which it is being inlined.  */
  splay_tree decl_map;
  /* We use the same mechanism to build clones that we do to perform
     inlining.  However, there are a few places where we need to
     distinguish between those two situations.  This flag is true if
     we are cloning, rather than inlining.  */
  bool cloning_p;
  /* Similarly for saving the function body.  */
  bool saving_p;
  /* Versioning a function is slightly different from inlining.  */
  bool versioning_p;
  /* Callgraph node of the function we are inlining into.  */
  struct cgraph_node *node;
  /* Callgraph node of the currently inlined function.  */
  struct cgraph_node *current_node;
  /* Current BLOCK.  */
  tree block;
  varray_type ipa_info;
  /* Exception region the inlined call lies in.  */
  int eh_region;
  /* Take the region number in the function being copied, add this value and
     get the eh region number of the duplicate in the function we inline into.  */
  int eh_region_offset;
} inline_data;

/* Prototypes.  */

static tree declare_return_variable (inline_data *, tree, tree, tree *);
static tree copy_body_r (tree *, int *, void *);
static tree copy_generic_body (inline_data *);
static bool inlinable_function_p (tree);
static tree remap_decl (tree, inline_data *);
static tree remap_type (tree, inline_data *);
static void remap_block (tree *, inline_data *);
static tree remap_decls (tree, inline_data *);
static void copy_bind_expr (tree *, int *, inline_data *);
static tree mark_local_for_remap_r (tree *, int *, void *);
static void unsave_expr_1 (tree);
static tree unsave_r (tree *, int *, void *);
static void declare_inline_vars (tree, tree);
static void remap_save_expr (tree *, void *, int *);
static bool replace_ref_tree (inline_data *, tree *);
static inline bool inlining_p (inline_data *);
static void add_lexical_block (tree current_block, tree new_block);

/* Insert a tree->tree mapping for ID.  Although the name suggests
   that the trees should be variables, this map is used for more than that.  */

static void
insert_decl_map (inline_data *id, tree key, tree value)
{
  splay_tree_insert (id->decl_map, (splay_tree_key) key,
                     (splay_tree_value) value);

  /* Always insert an identity map as well.  If we see this same new
     node again, we won't want to duplicate it a second time.  */
  if (key != value)
    splay_tree_insert (id->decl_map, (splay_tree_key) value,
                       (splay_tree_value) value);
}

/* Remap DECL during the copying of the BLOCK tree for the function.  */

static tree
remap_decl (tree decl, inline_data *id)
{
  splay_tree_node n;
  tree fn;

  /* We only remap local variables in the current function.  */
  fn = id->callee;

  /* See if we have remapped this declaration.  */

  n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);

  /* If we didn't already have an equivalent for this declaration,
     create one now.  */
  if (!n)
    {
      /* Make a copy of the variable or label.  */
      tree t;
      t = copy_decl_for_dup (decl, fn, id->caller, id->versioning_p);

      /* Remember it, so that if we encounter this local entity again
         we can reuse this copy.  Do this early because remap_type may
         need this decl for TYPE_STUB_DECL.  */
      insert_decl_map (id, decl, t);

      /* Remap types, if necessary.  */
      TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
      if (TREE_CODE (t) == TYPE_DECL)
        DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);

      /* Remap sizes as necessary.  */
      walk_tree (&DECL_SIZE (t), copy_body_r, id, NULL);
      walk_tree (&DECL_SIZE_UNIT (t), copy_body_r, id, NULL);

      /* If fields, do likewise for offset and qualifier.  */
      if (TREE_CODE (t) == FIELD_DECL)
        {
          walk_tree (&DECL_FIELD_OFFSET (t), copy_body_r, id, NULL);
          if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
            walk_tree (&DECL_QUALIFIER (t), copy_body_r, id, NULL);
        }

#if 0
      /* FIXME handle anon aggrs.  */
      if (! DECL_NAME (t) && TREE_TYPE (t)
          && lang_hooks.tree_inlining.anon_aggr_type_p (TREE_TYPE (t)))
        {
          /* For a VAR_DECL of anonymous type, we must also copy the
             member VAR_DECLS here and rechain the DECL_ANON_UNION_ELEMS.  */
          tree members = NULL;
          tree src;

          for (src = DECL_ANON_UNION_ELEMS (t); src;
               src = TREE_CHAIN (src))
            {
              tree member = remap_decl (TREE_VALUE (src), id);

              gcc_assert (!TREE_PURPOSE (src));
              members = tree_cons (NULL, member, members);
            }
          DECL_ANON_UNION_ELEMS (t) = nreverse (members);
        }
#endif

      /* Remember it, so that if we encounter this local entity
         again we can reuse this copy.  */
      insert_decl_map (id, decl, t);
      return t;
    }

  return unshare_expr ((tree) n->value);
}

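/* Build, register and return a remapped copy of TYPE; helper for
   remap_type below, which calls this only for types that actually
   need remapping.  */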
static tree
remap_type_1 (tree type, inline_data *id)
{
  tree new, t;

  /* We do need a copy.  Build and register it now.  If this is a pointer or
     reference type, remap the designated type and make a new pointer or
     reference type.  */
  if (TREE_CODE (type) == POINTER_TYPE)
    {
      new = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
                                         TYPE_MODE (type),
                                         TYPE_REF_CAN_ALIAS_ALL (type));
      insert_decl_map (id, type, new);
      return new;
    }
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      new = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
                                            TYPE_MODE (type),
                                            TYPE_REF_CAN_ALIAS_ALL (type));
      insert_decl_map (id, type, new);
      return new;
    }
  else
    new = copy_node (type);

  insert_decl_map (id, type, new);

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (type);
  if (type != t)
    {
      t = remap_type (t, id);
      TYPE_MAIN_VARIANT (new) = t;
      TYPE_NEXT_VARIANT (new) = TYPE_MAIN_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new;
    }
  else
    {
      TYPE_MAIN_VARIANT (new) = new;
      TYPE_NEXT_VARIANT (new) = NULL;
    }

  if (TYPE_STUB_DECL (type))
    TYPE_STUB_DECL (new) = remap_decl (TYPE_STUB_DECL (type), id);

  /* Lazily create pointer and reference types.  */
  TYPE_POINTER_TO (new) = NULL;
  TYPE_REFERENCE_TO (new) = NULL;

  switch (TREE_CODE (new))
    {
    case INTEGER_TYPE:
    case REAL_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case CHAR_TYPE:
      t = TYPE_MIN_VALUE (new);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MIN_VALUE (new), copy_body_r, id, NULL);

      t = TYPE_MAX_VALUE (new);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MAX_VALUE (new), copy_body_r, id, NULL);
      return new;

    case FUNCTION_TYPE:
      TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
      walk_tree (&TYPE_ARG_TYPES (new), copy_body_r, id, NULL);
      return new;

    case ARRAY_TYPE:
      TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
      TYPE_DOMAIN (new) = remap_type (TYPE_DOMAIN (new), id);
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
        tree f, nf = NULL;

        for (f = TYPE_FIELDS (new); f ; f = TREE_CHAIN (f))
          {
            t = remap_decl (f, id);
            DECL_CONTEXT (t) = new;
            TREE_CHAIN (t) = nf;
            nf = t;
          }
        TYPE_FIELDS (new) = nreverse (nf);
      }
      break;

    case OFFSET_TYPE:
    default:
      /* Shouldn't have been thought variable sized.  */
      gcc_unreachable ();
    }

  walk_tree (&TYPE_SIZE (new), copy_body_r, id, NULL);
  walk_tree (&TYPE_SIZE_UNIT (new), copy_body_r, id, NULL);

  return new;
}

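/* Return the remapped equivalent of TYPE for the function being inlined
   into, creating and caching one if necessary; a type that is not
   variably modified is mapped to itself.  */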
static tree
remap_type (tree type, inline_data *id)
{
  splay_tree_node node;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = splay_tree_lookup (id->decl_map, (splay_tree_key) type);
  if (node)
    return (tree) node->value;

  /* The type only needs remapping if it's variably modified.  */
  if (! variably_modified_type_p (type, id->callee))
    {
      insert_decl_map (id, type, type);
      return type;
    }

  return remap_type_1 (type, id);
}

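/* Remap the declarations in the chain DECLS and return the chain of
   remapped copies; declarations that are not automatic variables of the
   callee (and not DECL_EXTERNAL) are instead linked, unchanged, into
   cfun->unexpanded_var_list.  */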
static tree
remap_decls (tree decls, inline_data *id)
{
  tree old_var;
  tree new_decls = NULL_TREE;

  /* Remap its variables.  */
  for (old_var = decls; old_var; old_var = TREE_CHAIN (old_var))
    {
      tree new_var;

      /* We cannot duplicate local static declarations without breaking
         the one-decl rule, so don't remap them; just link the originals
         into unexpanded_var_list.  */
      if (!lang_hooks.tree_inlining.auto_var_in_fn_p (old_var, id->callee)
          && !DECL_EXTERNAL (old_var))
        {
          cfun->unexpanded_var_list = tree_cons (NULL_TREE, old_var,
                                                 cfun->unexpanded_var_list);
          continue;
        }

      /* Remap the variable.  */
      new_var = remap_decl (old_var, id);

      /* If we didn't remap this variable, we can't mess with its
         TREE_CHAIN.  If we remapped this variable to the return slot, it's
         already declared somewhere else, so don't declare it here.  */
      if (!new_var || new_var == id->retvar)
        ;
      else
        {
          gcc_assert (DECL_P (new_var));
          TREE_CHAIN (new_var) = new_decls;
          new_decls = new_var;
        }
    }

  return nreverse (new_decls);
}

/* Copy the BLOCK to contain remapped versions of the variables
   therein, and hook the new block into the block-tree.  */

static void
remap_block (tree *block, inline_data *id)
{
  tree old_block;
  tree new_block;
  tree fn;

  /* Make the new block.  */
  old_block = *block;
  new_block = make_node (BLOCK);
  TREE_USED (new_block) = TREE_USED (old_block);
  BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
  BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
  *block = new_block;

  /* Remap its variables.  */
  BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block), id);

  fn = id->caller;
  if (id->cloning_p)
    /* We're building a clone; DECL_INITIAL is still
       error_mark_node, and current_binding_level is the parm
       binding level.  */
    lang_hooks.decls.insert_block (new_block);
  /* Remember the remapped block.  */
  insert_decl_map (id, old_block, new_block);
}

/* Copy the whole block tree and root it in id->block.  */
static tree
remap_blocks (tree block, inline_data *id)
{
  tree t;
  tree new = block;

  if (!block)
    return NULL;

  remap_block (&new, id);
  gcc_assert (new != block);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    add_lexical_block (new, remap_blocks (t, id));
  return new;
}

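/* Replace the STATEMENT_LIST at *TP with a new list that links to the
   same statement nodes, so the list structure itself is not shared.  */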
static void
copy_statement_list (tree *tp)
{
  tree_stmt_iterator oi, ni;
  tree new;

  new = alloc_stmt_list ();
  ni = tsi_start (new);
  oi = tsi_start (*tp);
  *tp = new;

  for (; !tsi_end_p (oi); tsi_next (&oi))
    tsi_link_after (&ni, tsi_stmt (oi), TSI_NEW_STMT);
}

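/* Copy the BIND_EXPR at *TP, remapping its BLOCK and its local
   variables.  */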
static void
copy_bind_expr (tree *tp, int *walk_subtrees, inline_data *id)
{
  tree block = BIND_EXPR_BLOCK (*tp);
  /* Copy (and replace) the statement.  */
  copy_tree_r (tp, walk_subtrees, NULL);
  if (block)
    {
      remap_block (&block, id);
      BIND_EXPR_BLOCK (*tp) = block;
    }

  if (BIND_EXPR_VARS (*tp))
    /* This will remap a lot of the same decls again, but this should be
       harmless.  */
    BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), id);
}

/* Called from copy_body via walk_tree.  DATA is really an
   `inline_data *'.  */

static tree
copy_body_r (tree *tp, int *walk_subtrees, void *data)
{
  inline_data *id = (inline_data *) data;
  tree fn = id->callee;
  tree new_block;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When inlining, a RETURN_EXPR is rewritten: either it becomes the
     MODIFY_EXPR it wraps, or it is deleted outright.  */
  if (TREE_CODE (*tp) == RETURN_EXPR && inlining_p (id))
    {
      tree assignment = TREE_OPERAND (*tp, 0);

      /* If we're returning something, just turn that into an
         assignment into the equivalent of the original RESULT_DECL.
         If the "assignment" is just the result decl, the result
         decl has already been set (e.g. a recent "foo (&result_decl,
         ...)"); just toss the entire RETURN_EXPR.  */
      if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
        {
          /* Replace the RETURN_EXPR with (a copy of) the
             MODIFY_EXPR hanging underneath.  */
          *tp = copy_node (assignment);
        }
      else /* Else the RETURN_EXPR returns no value.  */
        {
          *tp = NULL;
          return (void *)1;
        }
    }

  /* Local variables and labels need to be replaced by equivalent
     variables.  We don't want to copy static variables; there's only
     one of those, no matter how many times we inline the containing
     function.  Similarly for globals from an outer function.  */
  else if (lang_hooks.tree_inlining.auto_var_in_fn_p (*tp, fn))
    {
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    copy_statement_list (tp);
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    remap_save_expr (tp, id->decl_map, walk_subtrees);
  else if (TREE_CODE (*tp) == LABEL_DECL
           && (! DECL_CONTEXT (*tp)
               || decl_function_context (*tp) == id->callee))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  /* Types may need remapping as well.  */
  else if (TYPE_P (*tp))
    *tp = remap_type (*tp, id);

  /* If this is a constant, we have to copy the node iff the type will be
     remapped.  copy_tree_r will not copy a constant.  */
  else if (CONSTANT_CLASS_P (*tp))
    {
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
        *walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
        *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
                                  TREE_INT_CST_HIGH (*tp));
      else
        {
          *tp = copy_node (*tp);
          TREE_TYPE (*tp) = new_type;
        }
    }

  /* Otherwise, just copy the node.  Note that copy_tree_r already
     knows not to copy VAR_DECLs, etc., so this is safe.  */
  else
    {
      /* Here we handle trees that are not completely rewritten.
         First we detect some inlining-induced bogosities for
         discarding.  */
      if (TREE_CODE (*tp) == MODIFY_EXPR
          && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
          && (lang_hooks.tree_inlining.auto_var_in_fn_p
              (TREE_OPERAND (*tp, 0), fn)))
        {
          /* Some assignments VAR = VAR; don't generate any rtl code
             and thus don't count as variable modification.  Avoid
             keeping bogosities like 0 = 0.  */
          tree decl = TREE_OPERAND (*tp, 0), value;
          splay_tree_node n;

          n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
          if (n)
            {
              value = (tree) n->value;
              STRIP_TYPE_NOPS (value);
              if (TREE_CONSTANT (value) || TREE_READONLY_DECL_P (value))
                {
                  *tp = build_empty_stmt ();
                  return copy_body_r (tp, walk_subtrees, data);
                }
            }
        }
      else if (TREE_CODE (*tp) == INDIRECT_REF
               && !id->versioning_p)
        {
          /* Get rid of *& from inline substitutions that can happen when a
             pointer argument is an ADDR_EXPR.  */
          tree decl = TREE_OPERAND (*tp, 0);
          splay_tree_node n;

          n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
          if (n)
            {
              tree new;
              tree old;
              /* If we happen to get an ADDR_EXPR in n->value, strip
                 it manually here as we'll eventually get ADDR_EXPRs
                 which lie about their types pointed to.  In this case
                 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
                 but we absolutely rely on that.  As fold_indirect_ref
                 does other useful transformations, try that first, though.  */
              tree type = TREE_TYPE (TREE_TYPE ((tree)n->value));
              new = unshare_expr ((tree)n->value);
              old = *tp;
              *tp = fold_indirect_ref_1 (type, new);
              if (! *tp)
                {
                  if (TREE_CODE (new) == ADDR_EXPR)
                    *tp = TREE_OPERAND (new, 0);
                  else
                    {
                      *tp = build1 (INDIRECT_REF, type, new);
                      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
                    }
                }
              *walk_subtrees = 0;
              return NULL;
            }
        }

      /* Here is the "usual case".  Copy this tree node, and then
         tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, id->versioning_p ? data : NULL);

      /* If EXPR has a block defined, map it to the newly constructed block.
         When inlining we want EXPRs without a block to appear in the block
         of the function call.  */
      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (TREE_CODE (*tp))))
        {
          new_block = id->block;
          if (TREE_BLOCK (*tp))
            {
              splay_tree_node n;
              n = splay_tree_lookup (id->decl_map,
                                     (splay_tree_key) TREE_BLOCK (*tp));
              gcc_assert (n);
              new_block = (tree) n->value;
            }
          TREE_BLOCK (*tp) = new_block;
        }

      if (TREE_CODE (*tp) == RESX_EXPR && id->eh_region_offset)
        TREE_OPERAND (*tp, 0) =
          build_int_cst
            (NULL_TREE,
             id->eh_region_offset + TREE_INT_CST_LOW (TREE_OPERAND (*tp, 0)));

      TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      /* The copied TARGET_EXPR has never been expanded, even if the
         original node was expanded already.  */
      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
        {
          TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
          TREE_OPERAND (*tp, 3) = NULL_TREE;
        }

      /* Variable substitution need not be simple.  In particular, the
         INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
         and friends are up-to-date.  */
      else if (TREE_CODE (*tp) == ADDR_EXPR)
        {
          walk_tree (&TREE_OPERAND (*tp, 0), copy_body_r, id, NULL);
          recompute_tree_invarant_for_addr_expr (*tp);
          *walk_subtrees = 0;
        }
    }

  /* Keep iterating.  */
  return NULL_TREE;
}

/* Copy basic block, scale profile accordingly.  Edges will be taken care of
   later.  */

static basic_block
copy_bb (inline_data *id, basic_block bb, int frequency_scale, int count_scale)
{
  block_stmt_iterator bsi, copy_bsi;
  basic_block copy_basic_block;

  /* create_basic_block() will append every new block to
     basic_block_info automatically.  */
  copy_basic_block = create_basic_block (NULL, (void *) 0, bb->prev_bb->aux);
  copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE;
  copy_basic_block->frequency = (bb->frequency
                                     * frequency_scale / REG_BR_PROB_BASE);
  copy_bsi = bsi_start (copy_basic_block);

  for (bsi = bsi_start (bb);
       !bsi_end_p (bsi); bsi_next (&bsi))
    {
      tree stmt = bsi_stmt (bsi);
      tree orig_stmt = stmt;

      walk_tree (&stmt, copy_body_r, id, NULL);

      /* RETURN_EXPR might be removed,
         this is signalled by making stmt pointer NULL.  */
      if (stmt)
        {
          tree call, decl;
          bsi_insert_after (&copy_bsi, stmt, BSI_NEW_STMT);
          call = get_call_expr_in (stmt);
          /* We're duplicating a CALL_EXPR.  Find any corresponding
             callgraph edges and update or duplicate them.  */
          if (call && (decl = get_callee_fndecl (call)))
            {
              if (id->saving_p)
                {
                  struct cgraph_node *node;
                  struct cgraph_edge *edge;

                  /* We're saving a copy of the body, so we'll update the
                     callgraph nodes in place.  Note that we avoid
                     altering the original callgraph node; we begin with
                     the first clone.  */
                  for (node = id->node->next_clone;
                       node;
                       node = node->next_clone)
                    {
                      edge = cgraph_edge (node, orig_stmt);
                      gcc_assert (edge);
                      edge->call_stmt = stmt;
                    }
                }
              else
                {
                  struct cgraph_edge *edge;

                  /* We're cloning or inlining this body; duplicate the
                     associated callgraph nodes.  */
                  if (!id->versioning_p)
                    {
                      edge = cgraph_edge (id->current_node, orig_stmt);
                      if (edge)
                        cgraph_clone_edge (edge, id->node, stmt,
                                           REG_BR_PROB_BASE, 1, true);
                    }
                }
              if (id->versioning_p)
                {
                  /* Update the call_expr on the edges from the new version
                     to its callees.  */
                  struct cgraph_edge *edge;
                  edge = cgraph_edge (id->node, orig_stmt);
                  if (edge)
                    edge->call_stmt = stmt;
                }
            }
          /* If you think we can abort here, you are wrong.
             There is no region 0 in tree land.  */
          gcc_assert (lookup_stmt_eh_region_fn (id->callee_cfun, orig_stmt)
                      != 0);

          if (tree_could_throw_p (stmt))
            {
              int region = lookup_stmt_eh_region_fn (id->callee_cfun, orig_stmt);
              /* Add an entry for the copied tree in the EH hashtable.
                 When saving or cloning or versioning, use the hashtable in
                 cfun, and just copy the EH number.  When inlining, use the
                 hashtable in the caller, and adjust the region number.  */
              if (region > 0)
                add_stmt_to_eh_region (stmt, region + id->eh_region_offset);

              /* If this tree doesn't have a region associated with it,
                 and there is a "current region,"
                 then associate this tree with the current region
                 and add edges associated with this region.  */
              if ((lookup_stmt_eh_region_fn (id->callee_cfun,
                                             orig_stmt) <= 0
                   && id->eh_region > 0)
                  && tree_could_throw_p (stmt))
                add_stmt_to_eh_region (stmt, id->eh_region);
            }
        }
    }
  return copy_basic_block;
}

/* Copy edges from BB into its copy constructed earlier, scale profile
   accordingly.  Assume aux pointers point to the copies of each BB.  */
static void
copy_edges_for_bb (basic_block bb, int count_scale)
{
  basic_block new_bb = bb->aux;
  edge_iterator ei;
  edge old_edge;
  block_stmt_iterator bsi;
  int flags;

  /* Use the indices from the original blocks to create edges for the
     new ones.  */
  FOR_EACH_EDGE (old_edge, ei, bb->succs)
    if (!(old_edge->flags & EDGE_EH))
      {
        edge new;

        flags = old_edge->flags;

        /* Return edges do get a FALLTHRU flag when they get inlined.  */
        if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
            && old_edge->dest->aux != EXIT_BLOCK_PTR)
          flags |= EDGE_FALLTHRU;
        new = make_edge (new_bb, old_edge->dest->aux, flags);
        new->count = old_edge->count * count_scale / REG_BR_PROB_BASE;
        new->probability = old_edge->probability;
      }

  if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
    return;

  for (bsi = bsi_start (new_bb); !bsi_end_p (bsi);)
    {
      tree copy_stmt;

      copy_stmt = bsi_stmt (bsi);
      update_stmt (copy_stmt);
      /* Do this before the possible split_block.  */
      bsi_next (&bsi);

      /* If this tree could throw an exception, there are two
         cases where we need to add abnormal edge(s): the
         tree wasn't in a region and there is a "current
         region" in the caller; or the original tree had
         EH edges.  In both cases split the block after the tree,
         and add abnormal edge(s) as needed; we need both
         those from the callee and the caller.
         We check whether the copy can throw, because the const
         propagation can change an INDIRECT_REF which throws
         into a COMPONENT_REF which doesn't.  If the copy
         can throw, the original could also throw.  */

      if (tree_can_throw_internal (copy_stmt))
        {
          if (!bsi_end_p (bsi))
            /* Note that bb's predecessor edges aren't necessarily
               right at this point; split_block doesn't care.  */
            {
              edge e = split_block (new_bb, copy_stmt);
              new_bb = e->dest;
              bsi = bsi_start (new_bb);
            }

           make_eh_edges (copy_stmt);
        }
    }
}

/* Wrapper for remap_decl so it can be used as a callback.  */
static tree
remap_decl_1 (tree decl, void *data)
{
  return remap_decl (decl, data);
}

/* Make a copy of the body of FN so that it can be inserted inline in
   another function.  Walks FN via CFG, returns new fndecl.  */

static tree
copy_cfg_body (inline_data * id, gcov_type count, int frequency,
               basic_block entry_block_map, basic_block exit_block_map)
{
  tree callee_fndecl = id->callee;
  /* Original cfun for the callee, doesn't change.  */
  struct function *callee_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
  /* Copy, built by this function.  */
  struct function *new_cfun;
  /* Place to copy from; when a copy of the function was saved off earlier,
     use that instead of the main copy.  */
  struct function *cfun_to_copy =
    (struct function *) ggc_alloc_cleared (sizeof (struct function));
  basic_block bb;
  tree new_fndecl = NULL;
  bool saving_or_cloning;
  int count_scale, frequency_scale;

  if (ENTRY_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->count)
    count_scale = (REG_BR_PROB_BASE * count
                   / ENTRY_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->count);
  else
    count_scale = 1;

  if (ENTRY_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->frequency)
    frequency_scale = (REG_BR_PROB_BASE * frequency
                       /
                       ENTRY_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->frequency);
  else
    frequency_scale = count_scale;

  /* Register specific tree functions.  */
  tree_register_cfg_hooks ();

  /* Must have a CFG here at this point.  */
  gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
              (DECL_STRUCT_FUNCTION (callee_fndecl)));

  *cfun_to_copy = *DECL_STRUCT_FUNCTION (callee_fndecl);

  /* If there is a saved_cfg+saved_args lurking in the
     struct function, a copy of the callee body was saved there, and
     the 'struct cgraph edge' nodes have been fudged to point into the
     saved body.  Accordingly, we want to copy that saved body so the
     callgraph edges will be recognized and cloned properly.  */
  if (cfun_to_copy->saved_cfg)
    {
      cfun_to_copy->cfg = cfun_to_copy->saved_cfg;
      cfun_to_copy->eh = cfun_to_copy->saved_eh;
    }
  id->callee_cfun = cfun_to_copy;

  /* If saving or cloning a function body, create new basic_block_info
     and label_to_block_maps.  Otherwise, we're duplicating a function
     body for inlining; insert our new blocks and labels into the
     existing varrays.  */
  saving_or_cloning = (id->saving_p || id->cloning_p || id->versioning_p);
  if (saving_or_cloning)
    {
      new_cfun =
        (struct function *) ggc_alloc_cleared (sizeof (struct function));
      *new_cfun = *DECL_STRUCT_FUNCTION (callee_fndecl);
      new_cfun->cfg = NULL;
      new_cfun->decl = new_fndecl = copy_node (callee_fndecl);
      new_cfun->ib_boundaries_block = (varray_type) 0;
      DECL_STRUCT_FUNCTION (new_fndecl) = new_cfun;
      push_cfun (new_cfun);
      init_empty_tree_cfg ();

      ENTRY_BLOCK_PTR->count =
        (ENTRY_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->count * count_scale /
         REG_BR_PROB_BASE);
      ENTRY_BLOCK_PTR->frequency =
        (ENTRY_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->frequency *
         frequency_scale / REG_BR_PROB_BASE);
      EXIT_BLOCK_PTR->count =
        (EXIT_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->count * count_scale /
         REG_BR_PROB_BASE);
      EXIT_BLOCK_PTR->frequency =
        (EXIT_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->frequency *
         frequency_scale / REG_BR_PROB_BASE);

      entry_block_map = ENTRY_BLOCK_PTR;
      exit_block_map = EXIT_BLOCK_PTR;
    }

  ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
  EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;

  /* Duplicate any exception-handling regions.  */
  if (cfun->eh)
    {
      if (saving_or_cloning)
        init_eh_for_function ();
      id->eh_region_offset = duplicate_eh_regions (cfun_to_copy,
                                                   remap_decl_1,
                                                   id, id->eh_region);
      gcc_assert (inlining_p (id) || !id->eh_region_offset);
    }
  /* Use aux pointers to map the original blocks to their copies.  */
  FOR_EACH_BB_FN (bb, cfun_to_copy)
    bb->aux = copy_bb (id, bb, frequency_scale, count_scale);
  /* Now that we've duplicated the blocks, duplicate their edges.  */
  FOR_ALL_BB_FN (bb, cfun_to_copy)
    copy_edges_for_bb (bb, count_scale);
  FOR_ALL_BB_FN (bb, cfun_to_copy)
    bb->aux = NULL;

  if (saving_or_cloning)
    pop_cfun ();

  return new_fndecl;
}

/* Make a copy of the body of FN so that it can be inserted inline in
   another function.  */

static tree
copy_generic_body (inline_data *id)
{
  tree body;
  tree fndecl = id->callee;

  body = DECL_SAVED_TREE (fndecl);
  walk_tree (&body, copy_body_r, id, NULL);

  return body;
}

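/* Copy the body of ID->callee.  The body must have a CFG; it is copied
   by walking the CFG via copy_cfg_body.  */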
static tree
copy_body (inline_data *id, gcov_type count, int frequency,
           basic_block entry_block_map, basic_block exit_block_map)
{
  tree fndecl = id->callee;
  tree body;

  /* If this body has a CFG, walk CFG and copy.  */
  gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
  body = copy_cfg_body (id, count, frequency, entry_block_map, exit_block_map);

  return body;
}

/* Return true if VALUE is an ADDR_EXPR of an automatic variable
   defined in function FN, or of a data member thereof.  */

static bool
self_inlining_addr_expr (tree value, tree fn)
{
  tree var;

  if (TREE_CODE (value) != ADDR_EXPR)
    return false;

  var = get_base_address (TREE_OPERAND (value, 0));

  return var && lang_hooks.tree_inlining.auto_var_in_fn_p (var, fn);
}

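/* Set up parameter P of the inlined function with the initial VALUE it
   receives at the call: either map P directly to VALUE when that is
   safe, or create an equivalent VAR_DECL, chain it onto *VARS, and
   initialize it at the end of BB.  */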
static void
setup_one_parameter (inline_data *id, tree p, tree value, tree fn,
                     basic_block bb, tree *vars)
{
  tree init_stmt;
  tree var;
  tree var_sub;

  /* If the parameter is never assigned to, we may not need to
     create a new variable here at all.  Instead, we may be able
     to just use the argument value.  */
  if (TREE_READONLY (p)
      && !TREE_ADDRESSABLE (p)
      && value && !TREE_SIDE_EFFECTS (value))
    {
      /* We may produce non-gimple trees by adding NOPs or introduce
         invalid sharing when the operand is not really constant.
         It is no big deal to prohibit constant propagation here as
         we will constant propagate in the DOM1 pass anyway.  */
      if (is_gimple_min_invariant (value)
          && lang_hooks.types_compatible_p (TREE_TYPE (value), TREE_TYPE (p))
          /* We have to be very careful about ADDR_EXPR.  Make sure
             the base variable isn't a local variable of the inlined
             function, e.g., when doing recursive inlining, direct or
             mutually-recursive or whatever, which is why we don't
             just test whether fn == current_function_decl.  */
          && ! self_inlining_addr_expr (value, fn))
        {
          insert_decl_map (id, p, value);
          return;
        }
    }

  /* Make an equivalent VAR_DECL.  Note that we must NOT remap the type
     here since the type of this decl must be visible to the calling
     function.  */
  var = copy_decl_for_dup (p, fn, id->caller, /*versioning=*/false);

  /* See if the frontend wants to pass this by invisible reference.  If
     so, our new VAR_DECL will have REFERENCE_TYPE, and we need to
     replace uses of the PARM_DECL with dereferences.  */
  if (TREE_TYPE (var) != TREE_TYPE (p)
      && POINTER_TYPE_P (TREE_TYPE (var))
      && TREE_TYPE (TREE_TYPE (var)) == TREE_TYPE (p))
    {
      insert_decl_map (id, var, var);
      var_sub = build_fold_indirect_ref (var);
    }
  else
    var_sub = var;

  /* Register the VAR_DECL as the equivalent for the PARM_DECL;
     that way, when the PARM_DECL is encountered, it will be
     automatically replaced by the VAR_DECL.  */
  insert_decl_map (id, p, var_sub);

  /* Declare this new variable.  */
  TREE_CHAIN (var) = *vars;
  *vars = var;

  /* Make the gimplifier happy about this variable.  */
  DECL_SEEN_IN_BIND_EXPR_P (var) = 1;

  /* Even if P was TREE_READONLY, the new VAR should not be.
     In the original code, we would have constructed a
     temporary, and then the function body would have never
     changed the value of P.  However, now, we will be
     constructing VAR directly.  The constructor body may
     change its value multiple times as it is being
     constructed.  Therefore, it must not be TREE_READONLY;
     the back-end assumes that a TREE_READONLY variable is
     assigned to only once.  */
  if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
    TREE_READONLY (var) = 0;

  /* Initialize this VAR_DECL from the equivalent argument.  Convert
     the argument to the proper type in case it was promoted.  */
  if (value)
    {
      tree rhs = fold_convert (TREE_TYPE (var), value);
      block_stmt_iterator bsi = bsi_last (bb);

      if (rhs == error_mark_node)
        return;

      /* We want to use MODIFY_EXPR, not INIT_EXPR here so that we
         keep our trees in gimple form.  */
      init_stmt = build (MODIFY_EXPR, TREE_TYPE (var), var, rhs);

      /* If we did not create a gimple value and we did not create a gimple
         cast of a gimple value, then we will need to gimplify INIT_STMTS
         at the end.  Note that is_gimple_cast only checks the outer
         tree code, not its operand.  Thus the explicit check that its
         operand is a gimple value.  */
      if (!is_gimple_val (rhs)
          && (!is_gimple_cast (rhs)
              || !is_gimple_val (TREE_OPERAND (rhs, 0))))
        gimplify_stmt (&init_stmt);

      /* If VAR represents a zero-sized variable, it's possible that the
         assignment statement may result in no gimple statements.  */
      if (init_stmt)
        bsi_insert_after (&bsi, init_stmt, BSI_NEW_STMT);
    }
}

/* Generate code to initialize the parameters of the function at the
   top of the stack in ID from the ARGS (presented as a TREE_LIST).  */

static void
initialize_inlined_parameters (inline_data *id, tree args, tree static_chain,
                               tree fn, basic_block bb)
{
  tree parms;
  tree a;
  tree p;
  tree vars = NULL_TREE;
  int argnum = 0;

  /* Figure out what the parameters are.  */
  parms = DECL_ARGUMENTS (fn);
  if (fn == current_function_decl)
    parms = cfun->saved_args;

  /* Loop through the parameter declarations, replacing each with an
     equivalent VAR_DECL, appropriately initialized.  */
  for (p = parms, a = args; p;
       a = a ? TREE_CHAIN (a) : a, p = TREE_CHAIN (p))
    {
      tree value;

      ++argnum;

      /* Find the initializer.  */
      value = lang_hooks.tree_inlining.convert_parm_for_inlining
              (p, a ? TREE_VALUE (a) : NULL_TREE, fn, argnum);

      setup_one_parameter (id, p, value, fn, bb, &vars);
    }

  /* Initialize the static chain.  */
  p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
  if (fn == current_function_decl)
    p = DECL_STRUCT_FUNCTION (fn)->saved_static_chain_decl;
  if (p)
    {
      /* No static chain?  Seems like a bug in tree-nested.c.  */
      gcc_assert (static_chain);

      setup_one_parameter (id, p, static_chain, fn, bb, &vars);
    }

  declare_inline_vars (id->block, vars);
}

/* Declare a return variable to replace the RESULT_DECL for the
   function we are calling.  An appropriate DECL_STMT is returned.
   The USE_STMT is filled to contain a use of the declaration to
   indicate the return value of the function.

   RETURN_SLOT_ADDR, if non-null, was a fake parameter that
   took the address of the result.  MODIFY_DEST, if non-null, was the LHS of
   the MODIFY_EXPR to which this call is the RHS.

   The return value is a (possibly null) value that is the result of the
   function as seen by the callee.  *USE_P is a (possibly null) value that
   holds the result as seen by the caller.  */

static tree
declare_return_variable (inline_data *id, tree return_slot_addr,
                         tree modify_dest, tree *use_p)
{
  tree callee = id->callee;
  tree caller = id->caller;
  tree result = DECL_RESULT (callee);
  tree callee_type = TREE_TYPE (result);
  tree caller_type = TREE_TYPE (TREE_TYPE (callee));
  tree var, use;

  /* We don't need to do anything for functions that don't return
     anything.  */
  if (!result || VOID_TYPE_P (callee_type))
    {
      *use_p = NULL_TREE;
      return NULL_TREE;
    }

  /* If there was a return slot, then the return value is the
     dereferenced address of that object.  */
  if (return_slot_addr)
    {
      /* The front end shouldn't have used both return_slot_addr and
         a modify expression.  */
      gcc_assert (!modify_dest);
      if (DECL_BY_REFERENCE (result))
        var = return_slot_addr;
      else
        var = build_fold_indirect_ref (return_slot_addr);
      if (TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
          && !DECL_COMPLEX_GIMPLE_REG_P (result)
          && DECL_P (var))
        DECL_COMPLEX_GIMPLE_REG_P (var) = 0;
      use = NULL;
      goto done;
    }

  /* All types requiring non-trivial constructors should have been handled.  */
  gcc_assert (!TREE_ADDRESSABLE (callee_type));

  /* Attempt to avoid creating a new temporary variable.  */
  if (modify_dest)
    {
      bool use_it = false;

      /* We can't use MODIFY_DEST if there's type promotion involved.  */
      if (!lang_hooks.types_compatible_p (caller_type, callee_type))
        use_it = false;

      /* ??? If we're assigning to a variable sized type, then we must
         reuse the destination variable, because we've no good way to
         create variable sized temporaries at this point.  */
      else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
        use_it = true;

      /* If the callee cannot possibly modify MODIFY_DEST, then we can
         reuse it as the result of the call directly.  Don't do this if
         it would promote MODIFY_DEST to addressable.  */
      else if (TREE_ADDRESSABLE (result))
        use_it = false;
      else
        {
          tree base_m = get_base_address (modify_dest);

          /* If the base isn't a decl, then it's a pointer, and we don't
             know where that's going to go.  */
          if (!DECL_P (base_m))
            use_it = false;
          else if (is_global_var (base_m))
            use_it = false;
          else if (TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
                   && !DECL_COMPLEX_GIMPLE_REG_P (result)
                   && DECL_COMPLEX_GIMPLE_REG_P (base_m))
            use_it = false;
          else if (!TREE_ADDRESSABLE (base_m))
            use_it = true;
        }

      if (use_it)
        {
          var = modify_dest;
          use = NULL;
          goto done;
        }
    }

  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);

  var = copy_decl_for_dup (result, callee, caller, /*versioning=*/false);

  DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
  DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list
    = tree_cons (NULL_TREE, var,
                 DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list);

  /* Do not have the rest of GCC warn about this variable as it should
     not be visible to the user.  */
  TREE_NO_WARNING (var) = 1;

  /* Build the use expr.  If the return type of the function was
     promoted, convert it back to the expected type.  */
  use = var;
  if (!lang_hooks.types_compatible_p (TREE_TYPE (var), caller_type))
    use = fold_convert (caller_type, var);

 done:
  /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
     way, when the RESULT_DECL is encountered, it will be
     automatically replaced by the VAR_DECL.  */
  insert_decl_map (id, result, var);

  /* Remember this so we can ignore it in remap_decls.  */
  id->retvar = var;

  *use_p = use;
  return var;
}

/* Returns nonzero if a function can be inlined as a tree.  */

bool
tree_inlinable_function_p (tree fn)
{
  return inlinable_function_p (fn);
}

static const char *inline_forbidden_reason;

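/* Callback for walk_tree_without_duplicates.  If the tree at NODEP is a
   construct that prevents function FN (passed in FNP) from being inlined,
   set inline_forbidden_reason and return the offending node.  */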
static tree
1367
inline_forbidden_p_1 (tree *nodep, int *walk_subtrees ATTRIBUTE_UNUSED,
1368
                      void *fnp)
1369
{
1370
  tree node = *nodep;
1371
  tree fn = (tree) fnp;
1372
  tree t;
1373
 
1374
  switch (TREE_CODE (node))
1375
    {
1376
    case CALL_EXPR:
1377
      /* Refuse to inline alloca call unless user explicitly forced so as
1378
         this may change program's memory overhead drastically when the
1379
         function using alloca is called in a loop.  In the GCC source
         included in SPEC2000, inlining into schedule_block caused the
         compiler to require 2GB of RAM instead of 256MB.  */
      if (alloca_call_p (node)
          && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
        {
          inline_forbidden_reason
            = G_("function %q+F can never be inlined because it uses "
                 "alloca (override using the always_inline attribute)");
          return node;
        }
      t = get_callee_fndecl (node);
      if (! t)
        break;

      /* We cannot inline functions that call setjmp.  */
      if (setjmp_call_p (t))
        {
          inline_forbidden_reason
            = G_("function %q+F can never be inlined because it uses setjmp");
          return node;
        }

      if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
        switch (DECL_FUNCTION_CODE (t))
          {
            /* We cannot inline functions that take a variable number of
               arguments.  */
          case BUILT_IN_VA_START:
          case BUILT_IN_STDARG_START:
          case BUILT_IN_NEXT_ARG:
          case BUILT_IN_VA_END:
            inline_forbidden_reason
              = G_("function %q+F can never be inlined because it "
                   "uses variable argument lists");
            return node;

          case BUILT_IN_LONGJMP:
            /* We can't inline functions that call __builtin_longjmp at
               all.  The non-local goto machinery really requires the
               destination be in a different function.  If we allow the
               function calling __builtin_longjmp to be inlined into the
               function calling __builtin_setjmp, Things will Go Awry.  */
            inline_forbidden_reason
              = G_("function %q+F can never be inlined because "
                   "it uses setjmp-longjmp exception handling");
            return node;

          case BUILT_IN_NONLOCAL_GOTO:
            /* Similarly.  */
            inline_forbidden_reason
              = G_("function %q+F can never be inlined because "
                   "it uses non-local goto");
            return node;

          case BUILT_IN_RETURN:
          case BUILT_IN_APPLY_ARGS:
            /* If a __builtin_apply_args caller were inlined, it would
               be saving the arguments of the function it had been
               inlined into.  Similarly, __builtin_return would return
               from the function the inline has been inlined into.  */
            inline_forbidden_reason
              = G_("function %q+F can never be inlined because "
                   "it uses __builtin_return or __builtin_apply_args");
            return node;

          default:
            break;
          }
      break;

    case GOTO_EXPR:
      t = TREE_OPERAND (node, 0);

      /* We will not inline a function that uses computed goto.  The
         addresses of its local labels, which may be tucked into
         global storage, are of course not constant across
         instantiations, which causes unexpected behavior.  */
      if (TREE_CODE (t) != LABEL_DECL)
        {
          inline_forbidden_reason
            = G_("function %q+F can never be inlined "
                 "because it contains a computed goto");
          return node;
        }
      break;

    case LABEL_EXPR:
      t = TREE_OPERAND (node, 0);
      if (DECL_NONLOCAL (t))
        {
          /* We cannot inline a function that receives a non-local goto
             because we cannot remap the destination label used in the
             function that is performing the non-local goto.  */
          inline_forbidden_reason
            = G_("function %q+F can never be inlined "
                 "because it receives a non-local goto");
          return node;
        }
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
      /* We cannot inline a function of the form

           void F (int i) { struct S { int ar[i]; } s; }

         Attempting to do so produces a catch-22.
         If walk_tree examines the TYPE_FIELDS chain of RECORD_TYPE/
         UNION_TYPE nodes, then it goes into infinite recursion on a
         structure containing a pointer to its own type.  If it doesn't,
         then the type node for S doesn't get adjusted properly when
         F is inlined.

         ??? This is likely no longer true, but it's too late in the 4.0
         cycle to try to find out.  This should be checked for 4.1.  */
      for (t = TYPE_FIELDS (node); t; t = TREE_CHAIN (t))
        if (variably_modified_type_p (TREE_TYPE (t), NULL))
          {
            inline_forbidden_reason
              = G_("function %q+F can never be inlined "
                   "because it uses variable sized variables");
            return node;
          }

    default:
      break;
    }

  return NULL_TREE;
}
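
/* Illustrative examples (editor's sketch, not part of the original
   source): each of the following bodies trips one of the checks above,
   assuming no always_inline attribute is present:

     void f1 (int n) { char *p = alloca (n); use (p); }   // alloca
     void f2 (void *l) { goto *l; }                        // computed goto
     void f3 (jmp_buf b) { if (setjmp (b)) return; }       // setjmp

   In each case inline_forbidden_p_1 returns the offending node and
   points inline_forbidden_reason at the matching diagnostic.  */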

/* Return a subexpression of FNDECL's body that makes it impossible to
   inline the function (an alloca call, for example), if any; return
   NULL_TREE otherwise.  */

static tree
inline_forbidden_p (tree fndecl)
{
  location_t saved_loc = input_location;
  block_stmt_iterator bsi;
  basic_block bb;
  tree ret = NULL_TREE;

  FOR_EACH_BB_FN (bb, DECL_STRUCT_FUNCTION (fndecl))
    for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
      {
        ret = walk_tree_without_duplicates (bsi_stmt_ptr (bsi),
                                            inline_forbidden_p_1, fndecl);
        if (ret)
          goto egress;
      }

egress:
  input_location = saved_loc;
  return ret;
}

/* Returns nonzero if FN is a function that does not have any
   fundamental inline-blocking properties.  */

static bool
inlinable_function_p (tree fn)
{
  bool inlinable = true;

  /* If we've already decided this function shouldn't be inlined,
     there's no need to check again.  */
  if (DECL_UNINLINABLE (fn))
    return false;

  /* See if there is any language-specific reason it cannot be
     inlined.  (It is important that this hook be called early because
     in C++ it may result in template instantiation.)
     If the function is not inlinable for language-specific reasons,
     it is left up to the langhook to explain why.  */
  inlinable = !lang_hooks.tree_inlining.cannot_inline_tree_fn (&fn);

  /* If we don't have the function body available, we can't inline it.
     However, this should not be recorded since we also get here for
     forward declared inline functions.  Therefore, return at once.  */
  if (!DECL_SAVED_TREE (fn))
    return false;

  /* If we're not inlining at all, then we cannot inline this function.  */
  else if (!flag_inline_trees)
    inlinable = false;

  /* Only try to inline functions if DECL_INLINE is set.  This should be
     true for all functions declared `inline', and for all other functions
     as well with -finline-functions.

     Don't think of disregarding DECL_INLINE when flag_inline_trees == 2;
     it's the front-end that must set DECL_INLINE in this case, because
     dwarf2out loses if a function that does not have DECL_INLINE set is
     inlined anyway.  That is why we have both DECL_INLINE and
     DECL_DECLARED_INLINE_P.  */
  /* FIXME: When flag_inline_trees dies, the check for flag_unit_at_a_time
            here should be redundant.  */
  else if (!DECL_INLINE (fn) && !flag_unit_at_a_time)
    inlinable = false;

  else if (inline_forbidden_p (fn))
    {
      /* See if we should warn about uninlinable functions.  Previously,
         some of these warnings would be issued while trying to expand
         the function inline, but that would cause multiple warnings
         about functions that would for example call alloca.  But since
         this is a property of the function, just one warning is enough.
         As a bonus we can now give more details about the reason why a
         function is not inlinable.
         We only warn for functions declared `inline' by the user.  */
      bool do_warning = (warn_inline
                         && DECL_INLINE (fn)
                         && DECL_DECLARED_INLINE_P (fn)
                         && !DECL_IN_SYSTEM_HEADER (fn));

      if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
        sorry (inline_forbidden_reason, fn);
      else if (do_warning)
        warning (OPT_Winline, inline_forbidden_reason, fn);

      inlinable = false;
    }

  /* Squirrel away the result so that we don't have to check again.  */
  DECL_UNINLINABLE (fn) = !inlinable;

  return inlinable;
}

/* Estimate the cost of a memory move.  Use machine dependent
   word size and take possible memcpy call into account.  */

int
estimate_move_cost (tree type)
{
  HOST_WIDE_INT size;

  size = int_size_in_bytes (type);

  if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO)
    /* Cost of a memcpy call, 3 arguments and the call.  */
    return 4;
  else
    return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
}
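
/* Worked example (editor's sketch, hypothetical target parameters):
   with MOVE_MAX_PIECES == 8 and MOVE_RATIO == 4, moves of at most
   32 bytes are costed per 8-byte piece, and anything larger (or of
   unknown size) is costed as a memcpy call:

     estimate_move_cost (int)                  -> (4 + 7) / 8  == 1
     estimate_move_cost (24-byte struct)       -> (24 + 7) / 8 == 3
     estimate_move_cost (64-byte struct)       -> 4  (memcpy)
     estimate_move_cost (variable-sized type)  -> 4  (size < 0)  */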

/* Used by estimate_num_insns.  Estimate number of instructions seen
   by given statement.  */

static tree
estimate_num_insns_1 (tree *tp, int *walk_subtrees, void *data)
{
  int *count = data;
  tree x = *tp;

  if (IS_TYPE_OR_DECL_P (x))
    {
      *walk_subtrees = 0;
      return NULL;
    }
  /* Assume that constants and references count as nothing.  Their cost
     should be dominated by the operations that use them, which we do
     count, and they are a common target of CSE and similar
     optimizations.  */
  else if (CONSTANT_CLASS_P (x) || REFERENCE_CLASS_P (x))
    return NULL;

  switch (TREE_CODE (x))
    {
    /* Containers have no cost.  */
    case TREE_LIST:
    case TREE_VEC:
    case BLOCK:
    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case INDIRECT_REF:
    case ALIGN_INDIRECT_REF:
    case MISALIGNED_INDIRECT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case OBJ_TYPE_REF:
    case EXC_PTR_EXPR: /* ??? */
    case FILTER_EXPR: /* ??? */
    case COMPOUND_EXPR:
    case BIND_EXPR:
    case WITH_CLEANUP_EXPR:
    case NOP_EXPR:
    case VIEW_CONVERT_EXPR:
    case SAVE_EXPR:
    case ADDR_EXPR:
    case COMPLEX_EXPR:
    case RANGE_EXPR:
    case CASE_LABEL_EXPR:
    case SSA_NAME:
    case CATCH_EXPR:
    case EH_FILTER_EXPR:
    case STATEMENT_LIST:
    case ERROR_MARK:
    case NON_LVALUE_EXPR:
    case FDESC_EXPR:
    case VA_ARG_EXPR:
    case TRY_CATCH_EXPR:
    case TRY_FINALLY_EXPR:
    case LABEL_EXPR:
    case GOTO_EXPR:
    case RETURN_EXPR:
    case EXIT_EXPR:
    case LOOP_EXPR:
    case PHI_NODE:
    case WITH_SIZE_EXPR:
      break;

    /* We don't account constants for now.  Assume that the cost is amortized
       by operations that do use them.  We may re-consider this decision once
       we are able to optimize the tree before estimating its size and break
       out static initializers.  */
    case IDENTIFIER_NODE:
    case INTEGER_CST:
    case REAL_CST:
    case COMPLEX_CST:
    case VECTOR_CST:
    case STRING_CST:
      *walk_subtrees = 0;
      return NULL;

    /* Try to estimate the cost of assignments.  We have three cases to
       deal with:
        1) Simple assignments to registers;
        2) Stores to things that must live in memory.  This includes
           "normal" stores to scalars, but also assignments of large
           structures, or constructors of big arrays;
        3) TARGET_EXPRs.

       Let us look at the first two cases, assuming we have "a = b + C":
       <modify_expr <var_decl "a"> <plus_expr <var_decl "b"> <constant C>>
       If "a" is a GIMPLE register, the assignment to it is free on almost
       any target, because "a" usually ends up in a real register.  Hence
       the only cost of this expression comes from the PLUS_EXPR, and we
       can ignore the MODIFY_EXPR.
       If "a" is not a GIMPLE register, the assignment to "a" will most
       likely be a real store, so the cost of the MODIFY_EXPR is the cost
       of moving something into "a", which we compute using the function
       estimate_move_cost.

       The third case deals with TARGET_EXPRs, for which the semantics are
       that a temporary is assigned, unless the TARGET_EXPR itself is being
       assigned to something else.  In the latter case we do not need the
       temporary.  E.g. in <modify_expr <var_decl "a"> <target_expr>>, the
       MODIFY_EXPR is free.  */
    case INIT_EXPR:
    case MODIFY_EXPR:
      /* Is the right-hand side a TARGET_EXPR?  */
      if (TREE_CODE (TREE_OPERAND (x, 1)) == TARGET_EXPR)
        break;
      /* ... fall through ...  */

    case TARGET_EXPR:
      x = TREE_OPERAND (x, 0);
      /* Is this an assignment to a register?  */
      if (is_gimple_reg (x))
        break;
      /* Otherwise it's a store, so fall through to compute the move cost.  */

    case CONSTRUCTOR:
      *count += estimate_move_cost (TREE_TYPE (x));
      break;

    /* Assign a cost of 1 to the usual operations.
       ??? We may consider mapping RTL costs to this.  */
    case COND_EXPR:
    case VEC_COND_EXPR:

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:

    case FIX_TRUNC_EXPR:
    case FIX_CEIL_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_ROUND_EXPR:

    case NEGATE_EXPR:
    case FLOAT_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case ABS_EXPR:

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case VEC_LSHIFT_EXPR:
    case VEC_RSHIFT_EXPR:

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case BIT_NOT_EXPR:

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case TRUTH_NOT_EXPR:

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:

    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:

    case CONVERT_EXPR:

    case CONJ_EXPR:

    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:

    case SWITCH_EXPR:

    case ASM_EXPR:

    case REALIGN_LOAD_EXPR:

    case REDUC_MAX_EXPR:
    case REDUC_MIN_EXPR:
    case REDUC_PLUS_EXPR:

    case RESX_EXPR:
      *count += 1;
      break;

    /* A few special cases of expensive operations.  This is useful
       to avoid inlining on functions having too many of these.  */
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
      *count += 10;
      break;
    case CALL_EXPR:
      {
        tree decl = get_callee_fndecl (x);
        tree arg;

        if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
          switch (DECL_FUNCTION_CODE (decl))
            {
            case BUILT_IN_CONSTANT_P:
              *walk_subtrees = 0;
              return NULL_TREE;
            case BUILT_IN_EXPECT:
              return NULL_TREE;
            default:
              break;
            }

        /* Our cost must be kept in sync with
           cgraph_estimate_size_after_inlining, which uses the function
           declaration to figure out the arguments.  */
        if (!decl)
          {
            for (arg = TREE_OPERAND (x, 1); arg; arg = TREE_CHAIN (arg))
              *count += estimate_move_cost (TREE_TYPE (TREE_VALUE (arg)));
          }
        else
          {
            for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
              *count += estimate_move_cost (TREE_TYPE (arg));
          }

        *count += PARAM_VALUE (PARAM_INLINE_CALL_COST);
        break;
      }
    default:
      gcc_unreachable ();
    }
  return NULL;
}
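
/* Worked example (editor's sketch): for the GIMPLE statement

     a = b / c;   i.e. <modify_expr <var_decl a> <trunc_div_expr b c>>

   with "a" a GIMPLE register, the MODIFY_EXPR is free, the VAR_DECL
   operands count as nothing, and the TRUNC_DIV_EXPR adds 10, so the
   statement is estimated at 10.  If "a" instead lives in memory,
   estimate_move_cost (TREE_TYPE (a)) is added on top.  */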

/* Estimate number of instructions that will be created by expanding EXPR.  */

int
estimate_num_insns (tree expr)
{
  int num = 0;
  struct pointer_set_t *visited_nodes;
  basic_block bb;
  block_stmt_iterator bsi;
  struct function *my_function;

  /* If we're given an entire function, walk the CFG.  */
  if (TREE_CODE (expr) == FUNCTION_DECL)
    {
      my_function = DECL_STRUCT_FUNCTION (expr);
      gcc_assert (my_function && my_function->cfg);
      visited_nodes = pointer_set_create ();
      FOR_EACH_BB_FN (bb, my_function)
        {
          for (bsi = bsi_start (bb);
               !bsi_end_p (bsi);
               bsi_next (&bsi))
            {
              walk_tree (bsi_stmt_ptr (bsi), estimate_num_insns_1,
                         &num, visited_nodes);
            }
        }
      pointer_set_destroy (visited_nodes);
    }
  else
    walk_tree_without_duplicates (&expr, estimate_num_insns_1, &num);

  return num;
}
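
/* Usage sketch (editor's addition): callers hand this either a whole
   function, e.g.

     int size = estimate_num_insns (fndecl);

   which walks the CFG sharing one visited-nodes set across all
   statements, or a single expression, which takes the
   walk_tree_without_duplicates path instead.  */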

typedef struct function *function_p;

DEF_VEC_P(function_p);
DEF_VEC_ALLOC_P(function_p,heap);

/* Initialized with NOGC, making this poisonous to the garbage collector.  */
static VEC(function_p,heap) *cfun_stack;

/* Save the current cfun on CFUN_STACK and make NEW_CFUN the current
   function.  */

void
push_cfun (struct function *new_cfun)
{
  VEC_safe_push (function_p, heap, cfun_stack, cfun);
  cfun = new_cfun;
}

/* Restore the most recently pushed cfun.  */

void
pop_cfun (void)
{
  cfun = VEC_pop (function_p, cfun_stack);
}
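
/* Usage sketch (editor's addition): push_cfun/pop_cfun bracket work
   done on behalf of another function, e.g.

     push_cfun (DECL_STRUCT_FUNCTION (other_fn));
     ... code here sees other_fn's struct function as cfun ...
     pop_cfun ();

   Pushes and pops must balance, since pop_cfun simply restores the
   topmost saved cfun.  */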

/* Install new lexical TREE_BLOCK underneath 'current_block'.  */

static void
add_lexical_block (tree current_block, tree new_block)
{
  tree *blk_p;

  /* Walk to the last sub-block.  */
  for (blk_p = &BLOCK_SUBBLOCKS (current_block);
       *blk_p;
       blk_p = &TREE_CHAIN (*blk_p))
    ;
  *blk_p = new_block;
  BLOCK_SUPERCONTEXT (new_block) = current_block;
}

/* If *TP is a CALL_EXPR, replace it with its inline expansion.  */

static bool
expand_call_inline (basic_block bb, tree stmt, tree *tp, void *data)
{
  inline_data *id;
  tree t;
  tree use_retvar;
  tree fn;
  splay_tree st;
  tree args;
  tree return_slot_addr;
  tree modify_dest;
  location_t saved_location;
  struct cgraph_edge *cg_edge;
  const char *reason;
  basic_block return_block;
  edge e;
  block_stmt_iterator bsi, stmt_bsi;
  bool successfully_inlined = FALSE;
  tree t_step;
  tree var;
  struct cgraph_node *old_node;
  tree decl;

  /* See what we've got.  */
  id = (inline_data *) data;
  t = *tp;

  /* Set input_location here so we get the right instantiation context
     if we call instantiate_decl from inlinable_function_p.  */
  saved_location = input_location;
  if (EXPR_HAS_LOCATION (t))
    input_location = EXPR_LOCATION (t);

  /* From here on, we're only interested in CALL_EXPRs.  */
  if (TREE_CODE (t) != CALL_EXPR)
    goto egress;

  /* First, see if we can figure out what function is being called.
     If we cannot, then there is no hope of inlining the function.  */
  fn = get_callee_fndecl (t);
  if (!fn)
    goto egress;

  /* Turn forward declarations into real ones.  */
  fn = cgraph_node (fn)->decl;

  /* If fn is a declaration of a function in a nested scope that was
     globally declared inline, we don't set its DECL_INITIAL.
     However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
     C++ front-end uses it for cdtors to refer to their internal
     declarations, that are not real functions.  Fortunately those
     don't have trees to be saved, so we can tell by checking their
     DECL_SAVED_TREE.  */
  if (! DECL_INITIAL (fn)
      && DECL_ABSTRACT_ORIGIN (fn)
      && DECL_SAVED_TREE (DECL_ABSTRACT_ORIGIN (fn)))
    fn = DECL_ABSTRACT_ORIGIN (fn);

  /* Objective-C and Fortran still call tree_rest_of_compilation
     directly.  Kill this check once that is fixed.  */
  if (!id->current_node->analyzed)
    goto egress;

  cg_edge = cgraph_edge (id->current_node, stmt);

  /* Constant propagation on arguments done during previous inlining
     may create a new direct call.  Produce an edge for it.  */
  if (!cg_edge)
    {
      struct cgraph_node *dest = cgraph_node (fn);

      /* We have a missing edge in the callgraph.  This can happen when
         previous inlining turned an indirect call into a direct call by
         constant-propagating arguments.  In all other cases we hit a bug
         (incorrect node sharing is the most common reason for missing
         edges).  */
      gcc_assert (dest->needed || !flag_unit_at_a_time);
      cgraph_create_edge (id->node, dest, stmt,
                          bb->count, bb->loop_depth)->inline_failed
        = N_("originally indirect function call not considered for inlining");
      goto egress;
    }

  /* Don't try to inline functions that are not well-suited to
     inlining.  */
  if (!cgraph_inline_p (cg_edge, &reason))
    {
      if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
          /* Avoid warnings during early inline pass.  */
          && (!flag_unit_at_a_time || cgraph_global_info_ready))
        {
          sorry ("inlining failed in call to %q+F: %s", fn, reason);
          sorry ("called from here");
        }
      else if (warn_inline && DECL_DECLARED_INLINE_P (fn)
               && !DECL_IN_SYSTEM_HEADER (fn)
               && strlen (reason)
               && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
               /* Avoid warnings during early inline pass.  */
               && (!flag_unit_at_a_time || cgraph_global_info_ready))
        {
          warning (OPT_Winline, "inlining failed in call to %q+F: %s",
                   fn, reason);
          warning (OPT_Winline, "called from here");
        }
      goto egress;
    }

#ifdef ENABLE_CHECKING
  if (cg_edge->callee->decl != id->node->decl)
    verify_cgraph_node (cg_edge->callee);
#endif

  /* We will be inlining this callee.  */

  id->eh_region = lookup_stmt_eh_region (stmt);

  /* Split the block holding the CALL_EXPR.  */

  e = split_block (bb, stmt);
  bb = e->src;
  return_block = e->dest;
  remove_edge (e);

  /* split_block splits before the statement; work around this by moving
     the call into the first half-block.  Not pretty, but seems easier
     than doing the CFG manipulation by hand when the CALL_EXPR is the
     last statement in BB.  */
  stmt_bsi = bsi_last (bb);
  bsi = bsi_start (return_block);
  if (!bsi_end_p (bsi))
    bsi_move_before (&stmt_bsi, &bsi);
  else
    {
      tree stmt = bsi_stmt (stmt_bsi);
      bsi_remove (&stmt_bsi);
      bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
    }
  stmt_bsi = bsi_start (return_block);

  /* Build a block containing code to initialize the arguments, the
     actual inline expansion of the body, and a label for the return
     statements within the function to jump to.  The type of the
     statement expression is the return type of the function call.  */
  id->block = make_node (BLOCK);
  BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
  BLOCK_SOURCE_LOCATION (id->block) = input_location;
  add_lexical_block (TREE_BLOCK (stmt), id->block);

  /* Local declarations will be replaced by their equivalents in this
     map.  */
  st = id->decl_map;
  id->decl_map = splay_tree_new (splay_tree_compare_pointers,
                                 NULL, NULL);

  /* Initialize the parameters.  */
  args = TREE_OPERAND (t, 1);

  initialize_inlined_parameters (id, args, TREE_OPERAND (t, 2), fn, bb);

  /* Record the function we are about to inline.  */
  id->callee = fn;

  if (DECL_STRUCT_FUNCTION (fn)->saved_blocks)
    add_lexical_block (id->block,
                       remap_blocks (DECL_STRUCT_FUNCTION (fn)->saved_blocks,
                                     id));
  else if (DECL_INITIAL (fn))
    add_lexical_block (id->block, remap_blocks (DECL_INITIAL (fn), id));

  /* Return statements in the function body will be replaced by jumps
     to the RET_LABEL.  */

  gcc_assert (DECL_INITIAL (fn));
  gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);

  /* Find the lhs to which the result of this call is assigned.  */
  return_slot_addr = NULL;
  if (TREE_CODE (stmt) == MODIFY_EXPR)
    {
      modify_dest = TREE_OPERAND (stmt, 0);

      /* The function which we are inlining might not return a value,
         in which case we should issue a warning that the function
         does not return a value.  In that case the optimizers will
         see that the variable to which the value is assigned was not
         initialized.  We do not want to issue a warning about that
         uninitialized variable.  */
      if (DECL_P (modify_dest))
        TREE_NO_WARNING (modify_dest) = 1;
      if (CALL_EXPR_RETURN_SLOT_OPT (t))
        {
          return_slot_addr = build_fold_addr_expr (modify_dest);
          STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
          modify_dest = NULL;
        }
    }
  else
    modify_dest = NULL;

  /* Declare the return variable for the function.  */
  decl = declare_return_variable (id, return_slot_addr,
                                  modify_dest, &use_retvar);
  /* Do this only if declare_return_variable created a new one.  */
  if (decl && !return_slot_addr && decl != modify_dest)
    declare_inline_vars (id->block, decl);

  /* After we've initialized the parameters, we insert the body of the
     function itself.  */
  old_node = id->current_node;

  /* Anoint the callee-to-be-duplicated as the "current_node."  When
     CALL_EXPRs within callee are duplicated, the edges from callee to
     callee's callees (caller's grandchildren) will be cloned.  */
  id->current_node = cg_edge->callee;

  /* This is it.  Duplicate the callee body.  Assume callee is
     pre-gimplified.  Note that we must not alter the caller
     function in any way before this point, as this CALL_EXPR may be
     a self-referential call; if we're calling ourselves, we need to
     duplicate our body before altering anything.  */
  copy_body (id, bb->count, bb->frequency, bb, return_block);
  id->current_node = old_node;

  /* Add local vars in this inlined callee to caller.  */
  t_step = id->callee_cfun->unexpanded_var_list;
  if (id->callee_cfun->saved_unexpanded_var_list)
    t_step = id->callee_cfun->saved_unexpanded_var_list;
  for (; t_step; t_step = TREE_CHAIN (t_step))
    {
      var = TREE_VALUE (t_step);
      if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
        cfun->unexpanded_var_list = tree_cons (NULL_TREE, var,
                                               cfun->unexpanded_var_list);
      else
        cfun->unexpanded_var_list = tree_cons (NULL_TREE, remap_decl (var, id),
                                               cfun->unexpanded_var_list);
    }

  /* Clean up.  */
  splay_tree_delete (id->decl_map);
  id->decl_map = st;

  /* If the inlined function returns a result that we care about,
     clobber the CALL_EXPR with a reference to the return variable.  */
  if (use_retvar && (TREE_CODE (bsi_stmt (stmt_bsi)) != CALL_EXPR))
    {
      *tp = use_retvar;
      maybe_clean_or_replace_eh_stmt (stmt, stmt);
    }
  else
    /* We're modifying a TSI owned by gimple_expand_calls_inline();
       tsi_delink() will leave the iterator in a sane state.  */
    bsi_remove (&stmt_bsi);

  bsi_next (&bsi);
  if (bsi_end_p (bsi))
    tree_purge_dead_eh_edges (return_block);

  /* If the value of the new expression is ignored, that's OK.  We
     don't warn about this for CALL_EXPRs, so we shouldn't warn about
     the equivalent inlined version either.  */
  TREE_USED (*tp) = 1;

  /* Output the inlining info for this abstract function, since it has been
     inlined.  If we don't do this now, we can lose the information about the
     variables in the function when the blocks get blown away as soon as we
     remove the cgraph node.  */
  (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);

  /* Update callgraph if needed.  */
  cgraph_remove_node (cg_edge->callee);

  /* Declare the 'auto' variables added with this inlined body.  */
  record_vars (BLOCK_VARS (id->block));
  id->block = NULL_TREE;
  successfully_inlined = TRUE;

 egress:
  input_location = saved_location;
  return successfully_inlined;
}

/* Expand call statements reachable from STMT_P.
   We can only have CALL_EXPRs as the "toplevel" tree code, possibly
   wrapped in a WITH_SIZE_EXPR, or nested in a MODIFY_EXPR.  See
   tree-gimple.c:get_call_expr_in().  We cannot, unfortunately, use
   that function here because we need a pointer to the CALL_EXPR,
   not the tree itself.  */

static bool
gimple_expand_calls_inline (basic_block bb, inline_data *id)
{
  block_stmt_iterator bsi;

  /* Register specific tree functions.  */
  tree_register_cfg_hooks ();
  for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
    {
      tree *expr_p = bsi_stmt_ptr (bsi);
      tree stmt = *expr_p;

      if (TREE_CODE (*expr_p) == MODIFY_EXPR)
        expr_p = &TREE_OPERAND (*expr_p, 1);
      if (TREE_CODE (*expr_p) == WITH_SIZE_EXPR)
        expr_p = &TREE_OPERAND (*expr_p, 0);
      if (TREE_CODE (*expr_p) == CALL_EXPR)
        if (expand_call_inline (bb, stmt, expr_p, id))
          return true;
    }
  return false;
}
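
/* Illustrative shapes (editor's note): the statement forms this walk
   recognizes, mirroring tree-gimple.c:get_call_expr_in:

     foo (x);                            CALL_EXPR at the top level
     a = foo (x);                        CALL_EXPR under a MODIFY_EXPR
     a = WITH_SIZE_EXPR <foo (x), n>;    CALL_EXPR under WITH_SIZE_EXPR

   expr_p is narrowed down to the CALL_EXPR slot before
   expand_call_inline is called, so the expansion can overwrite the
   call in place.  */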

/* Expand calls to inline functions in the body of FN.  */

void
optimize_inline_calls (tree fn)
{
  inline_data id;
  tree prev_fn;
  basic_block bb;
  /* There is no point in performing inlining if errors have already
     occurred -- and we might crash if we try to inline invalid
     code.  */
  if (errorcount || sorrycount)
    return;

  /* Clear out ID.  */
  memset (&id, 0, sizeof (id));

  id.current_node = id.node = cgraph_node (fn);
  id.caller = fn;
  /* Or any functions that aren't finished yet.  */
  prev_fn = NULL_TREE;
  if (current_function_decl)
    {
      id.caller = current_function_decl;
      prev_fn = current_function_decl;
    }
  push_gimplify_context ();

  /* Reach the trees by walking over the CFG, and note the
     enclosing basic-blocks in the call edges.  */
  /* We walk the blocks going forward, because inlined function bodies
     will split id->current_basic_block, and the new blocks will
     follow it; we'll trudge through them, processing their CALL_EXPRs
     along the way.  */
  FOR_EACH_BB (bb)
    gimple_expand_calls_inline (bb, &id);

  pop_gimplify_context (NULL);
  /* Renumber the (code) basic_blocks consecutively.  */
  compact_blocks ();
  /* Renumber the lexical scoping (non-code) blocks consecutively.  */
  number_blocks (fn);

#ifdef ENABLE_CHECKING
    {
      struct cgraph_edge *e;

      verify_cgraph_node (id.node);

      /* Double check that we inlined everything we are supposed to inline.  */
      for (e = id.node->callees; e; e = e->next_callee)
        gcc_assert (e->inline_failed);
    }
#endif
  /* We need to rescale frequencies again to peak at REG_BR_PROB_BASE
     as inlining loops might increase the maximum.  */
  if (ENTRY_BLOCK_PTR->count)
    counts_to_freqs ();
  fold_cond_expr_cond ();
}

/* FN is a function that has a complete body, and CLONE is a function whose
   body is to be set to a copy of FN, mapping argument declarations according
   to the ARG_MAP splay_tree.  */

void
clone_body (tree clone, tree fn, void *arg_map)
{
  inline_data id;

  /* Clone the body, as if we were making an inline call.  But, remap the
     parameters in the callee to the parameters of caller.  */
  memset (&id, 0, sizeof (id));
  id.caller = clone;
  id.callee = fn;
  id.callee_cfun = DECL_STRUCT_FUNCTION (fn);
  id.decl_map = (splay_tree)arg_map;

  /* Cloning is treated slightly differently from inlining.  Set
     CLONING_P so that it's clear which operation we're performing.  */
  id.cloning_p = true;

  /* We're not inside any EH region.  */
  id.eh_region = -1;

  /* Actually copy the body.  */
  append_to_statement_list_force (copy_generic_body (&id),
                                  &DECL_SAVED_TREE (clone));
}

/* Save a duplicate of FN's body in FN's struct function, for later use
   when the original body has been consumed (e.g. by expansion).  Put
   copies of DECL_ARGUMENTS in *ARG_COPY and of the static chain, if
   any, in *SC_COPY.  */

void
save_body (tree fn, tree *arg_copy, tree *sc_copy)
{
  inline_data id;
  tree newdecl, *parg;
  basic_block fn_entry_block;
  tree t_step;

  memset (&id, 0, sizeof (id));
  id.callee = fn;
  id.callee_cfun = DECL_STRUCT_FUNCTION (fn);
  id.caller = fn;
  id.node = cgraph_node (fn);
  id.saving_p = true;
  id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
  *arg_copy = DECL_ARGUMENTS (fn);

  for (parg = arg_copy; *parg; parg = &TREE_CHAIN (*parg))
    {
      tree new = copy_node (*parg);

      lang_hooks.dup_lang_specific_decl (new);
      DECL_ABSTRACT_ORIGIN (new) = DECL_ORIGIN (*parg);
      insert_decl_map (&id, *parg, new);
      TREE_CHAIN (new) = TREE_CHAIN (*parg);
      *parg = new;
    }

  *sc_copy = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
  if (*sc_copy)
    {
      tree new = copy_node (*sc_copy);

      lang_hooks.dup_lang_specific_decl (new);
      DECL_ABSTRACT_ORIGIN (new) = DECL_ORIGIN (*sc_copy);
      insert_decl_map (&id, *sc_copy, new);
      TREE_CHAIN (new) = TREE_CHAIN (*sc_copy);
      *sc_copy = new;
    }

  /* We're not inside any EH region.  */
  id.eh_region = -1;

  insert_decl_map (&id, DECL_RESULT (fn), DECL_RESULT (fn));

  DECL_STRUCT_FUNCTION (fn)->saved_blocks
    = remap_blocks (DECL_INITIAL (fn), &id);
  for (t_step = id.callee_cfun->unexpanded_var_list;
       t_step;
       t_step = TREE_CHAIN (t_step))
    {
      tree var = TREE_VALUE (t_step);
      if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
        cfun->saved_unexpanded_var_list
          = tree_cons (NULL_TREE, var, cfun->saved_unexpanded_var_list);
      else
        cfun->saved_unexpanded_var_list
          = tree_cons (NULL_TREE, remap_decl (var, &id),
                       cfun->saved_unexpanded_var_list);
    }

  /* Actually copy the body, including a new (struct function *) and CFG.
     EH info is also duplicated so its labels point into the copied
     CFG, not the original.  */
  fn_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fn));
  newdecl = copy_body (&id, fn_entry_block->count, fn_entry_block->frequency,
                       NULL, NULL);
  DECL_STRUCT_FUNCTION (fn)->saved_cfg = DECL_STRUCT_FUNCTION (newdecl)->cfg;
  DECL_STRUCT_FUNCTION (fn)->saved_eh = DECL_STRUCT_FUNCTION (newdecl)->eh;

  /* Clean up.  */
  splay_tree_delete (id.decl_map);
}

/* Passed to walk_tree.  Copies the node pointed to, if appropriate.  */

tree
copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  enum tree_code code = TREE_CODE (*tp);
  inline_data *id = (inline_data *) data;

  /* We make copies of most nodes.  */
  if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
      || code == TREE_LIST
      || code == TREE_VEC
      || code == TYPE_DECL)
    {
      /* Because the chain gets clobbered when we make a copy, we save it
         here.  */
      tree chain = TREE_CHAIN (*tp);
      tree new;

      if (id && id->versioning_p && replace_ref_tree (id, tp))
        {
          *walk_subtrees = 0;
          return NULL_TREE;
        }
      /* Copy the node.  */
      new = copy_node (*tp);

      /* Propagate mudflap marked-ness.  */
      if (flag_mudflap && mf_marked_p (*tp))
        mf_mark (new);

      *tp = new;

      /* Now, restore the chain, if appropriate.  That will cause
         walk_tree to walk into the chain as well.  */
      if (code == PARM_DECL || code == TREE_LIST)
        TREE_CHAIN (*tp) = chain;

      /* For now, we don't update BLOCKs when we make copies.  So, we
         have to nullify all BIND_EXPRs.  */
      if (TREE_CODE (*tp) == BIND_EXPR)
        BIND_EXPR_BLOCK (*tp) = NULL_TREE;
    }
  else if (code == CONSTRUCTOR)
    {
      /* CONSTRUCTOR nodes need special handling because
         we need to duplicate the vector of elements.  */
      tree new;

      new = copy_node (*tp);

      /* Propagate mudflap marked-ness.  */
      if (flag_mudflap && mf_marked_p (*tp))
        mf_mark (new);

      CONSTRUCTOR_ELTS (new) = VEC_copy (constructor_elt, gc,
                                         CONSTRUCTOR_ELTS (*tp));
      *tp = new;
    }
  else if (TREE_CODE_CLASS (code) == tcc_type)
    *walk_subtrees = 0;
  else if (TREE_CODE_CLASS (code) == tcc_declaration)
    *walk_subtrees = 0;
  else if (TREE_CODE_CLASS (code) == tcc_constant)
    *walk_subtrees = 0;
  else
    gcc_assert (code != STATEMENT_LIST);
  return NULL_TREE;
}

/* The SAVE_EXPR pointed to by TP is being copied.  If ST contains
   information indicating to what new SAVE_EXPR this one should be mapped,
   use that one.  Otherwise, create a new node and enter it in ST.  */

static void
remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
{
  splay_tree st = (splay_tree) st_;
  splay_tree_node n;
  tree t;

  /* See if we already encountered this SAVE_EXPR.  */
  n = splay_tree_lookup (st, (splay_tree_key) *tp);

  /* If we didn't already remap this SAVE_EXPR, do so now.  */
  if (!n)
    {
      t = copy_node (*tp);

      /* Remember this SAVE_EXPR.  */
      splay_tree_insert (st, (splay_tree_key) *tp, (splay_tree_value) t);
      /* Make sure we don't remap an already-remapped SAVE_EXPR.  */
      splay_tree_insert (st, (splay_tree_key) t, (splay_tree_value) t);
    }
  else
    {
      /* We've already walked into this SAVE_EXPR; don't do it again.  */
      *walk_subtrees = 0;
      t = (tree) n->value;
    }

  /* Replace this SAVE_EXPR with the copy.  */
  *tp = t;
}

/* Called via walk_tree.  If *TP points to a LABEL_EXPR for a local
   label, copies the label's declaration and enters it in the splay_tree
   in DATA (which is really an `inline_data *').  */

static tree
mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
                        void *data)
{
  inline_data *id = (inline_data *) data;

  /* Don't walk into types.  */
  if (TYPE_P (*tp))
    *walk_subtrees = 0;

  else if (TREE_CODE (*tp) == LABEL_EXPR)
    {
      tree decl = TREE_OPERAND (*tp, 0);

      /* Copy the decl and remember the copy.  */
      insert_decl_map (id, decl,
                       copy_decl_for_dup (decl, DECL_CONTEXT (decl),
                                          DECL_CONTEXT (decl),
                                          /*versioning=*/false));
    }

  return NULL_TREE;
}

/* Perform any modifications to EXPR required when it is unsaved.  Does
   not recurse into EXPR's subtrees.  */

static void
unsave_expr_1 (tree expr)
{
  switch (TREE_CODE (expr))
    {
    case TARGET_EXPR:
      /* Don't mess with a TARGET_EXPR that hasn't been expanded.
         It's OK for this to happen if it was part of a subtree that
         isn't immediately expanded, such as operand 2 of another
         TARGET_EXPR.  */
      if (TREE_OPERAND (expr, 1))
        break;

      TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
      TREE_OPERAND (expr, 3) = NULL_TREE;
      break;

    default:
      break;
    }
}

/* Called via walk_tree when an expression is unsaved.  Using the
   decl_map splay_tree of the inline_data pointed to by DATA, remaps
   all local declarations to appropriate replacements.  */

static tree
unsave_r (tree *tp, int *walk_subtrees, void *data)
{
  inline_data *id = (inline_data *) data;
  splay_tree st = id->decl_map;
  splay_tree_node n;

  /* Only a local declaration (variable or label).  */
  if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
      || TREE_CODE (*tp) == LABEL_DECL)
    {
      /* Lookup the declaration.  */
      n = splay_tree_lookup (st, (splay_tree_key) *tp);

      /* If it's there, remap it.  */
      if (n)
        *tp = (tree) n->value;
    }

  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    copy_statement_list (tp);
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    remap_save_expr (tp, st, walk_subtrees);
  else
    {
      copy_tree_r (tp, walk_subtrees, NULL);

      /* Do whatever unsaving is required.  */
      unsave_expr_1 (*tp);
    }

  /* Keep iterating.  */
  return NULL_TREE;
}

/* Copies everything in EXPR and replaces variables, labels
   and SAVE_EXPRs local to EXPR.  */

tree
unsave_expr_now (tree expr)
{
  inline_data id;

  /* There's nothing to do for NULL_TREE.  */
  if (expr == 0)
    return expr;

  /* Set up ID.  */
  memset (&id, 0, sizeof (id));
  id.callee = current_function_decl;
  id.caller = current_function_decl;
  id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);

  /* Walk the tree once to find local labels.  */
  walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);

  /* Walk the tree again, copying, remapping, and unsaving.  */
  walk_tree (&expr, unsave_r, &id, NULL);

  /* Clean up.  */
  splay_tree_delete (id.decl_map);

  return expr;
}
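
/* Usage sketch (editor's addition): a caller that needs a fresh,
   independently modifiable copy of a tree can write

     tree copy = unsave_expr_now (expr);

   after which COPY shares no local VAR_DECLs, LABEL_DECLs or
   SAVE_EXPRs with the tree it was copied from.  */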

/* Allow someone to determine if SEARCH is a child of TOP from gdb.  */

static tree
debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
{
  if (*tp == data)
    return (tree) data;
  else
    return NULL;
}

bool
debug_find_tree (tree top, tree search)
{
  return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
}


/* Declare the variables created by the inliner.  Add all the variables
   in VARS to BLOCK.  */

static void
declare_inline_vars (tree block, tree vars)
{
  tree t;
  for (t = vars; t; t = TREE_CHAIN (t))
    DECL_SEEN_IN_BIND_EXPR_P (t) = 1;

  if (block)
    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
}


/* Copy NODE (which must be a DECL).  The DECL originally was in the FROM_FN,
   but now it will be in the TO_FN.  VERSIONING means that this function
   is used by the versioning utility (not inlining or cloning).  */

tree
copy_decl_for_dup (tree decl, tree from_fn, tree to_fn, bool versioning)
{
  tree copy;

  gcc_assert (DECL_P (decl));
  /* Copy the declaration.  */
  if (!versioning
      && (TREE_CODE (decl) == PARM_DECL
          || TREE_CODE (decl) == RESULT_DECL))
    {
      tree type = TREE_TYPE (decl);

      /* For a parameter or result, we must make an equivalent VAR_DECL,
         not a new PARM_DECL.  */
      copy = build_decl (VAR_DECL, DECL_NAME (decl), type);
      TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
      TREE_READONLY (copy) = TREE_READONLY (decl);
      TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
      DECL_COMPLEX_GIMPLE_REG_P (copy) = DECL_COMPLEX_GIMPLE_REG_P (decl);
    }
  else
    {
      copy = copy_node (decl);
      /* The COPY is not abstract; it will be generated in TO_FN.  */
      DECL_ABSTRACT (copy) = 0;
      lang_hooks.dup_lang_specific_decl (copy);

      /* TREE_ADDRESSABLE isn't used to indicate that a label's
         address has been taken; it's for internal bookkeeping in
         expand_goto_internal.  */
      if (TREE_CODE (copy) == LABEL_DECL)
        {
          TREE_ADDRESSABLE (copy) = 0;
          LABEL_DECL_UID (copy) = -1;
        }
    }

  /* Don't generate debug information for the copy if we wouldn't have
     generated it for the original.  */
  DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
  DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);

  /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
     declaration inspired this copy.  */
  DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);

  /* The new variable/label has no RTL, yet.  */
  if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
      && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
    SET_DECL_RTL (copy, NULL_RTX);

  /* These args would always appear unused, if not for this.  */
  TREE_USED (copy) = 1;

  /* Set the context for the new declaration.  */
  if (!DECL_CONTEXT (decl))
    /* Globals stay global.  */
    ;
  else if (DECL_CONTEXT (decl) != from_fn)
    /* Things that weren't in the scope of the function we're inlining
       from aren't in the scope we're inlining to, either.  */
    ;
  else if (TREE_STATIC (decl))
    /* Function-scoped static variables should stay in the original
       function.  */
    ;
  else
    /* Ordinary automatic local variables are now in the scope of the
       new function.  */
    DECL_CONTEXT (copy) = to_fn;

  return copy;
}

/* Return a copy of the function's argument tree.  */
static tree
copy_arguments_for_versioning (tree orig_parm, inline_data *id)
{
  tree *arg_copy, *parg;

  arg_copy = &orig_parm;
  for (parg = arg_copy; *parg; parg = &TREE_CHAIN (*parg))
    {
      tree new = remap_decl (*parg, id);
      lang_hooks.dup_lang_specific_decl (new);
      TREE_CHAIN (new) = TREE_CHAIN (*parg);
      *parg = new;
    }
  return orig_parm;
}

/* Return a copy of the function's static chain.  */
static tree
copy_static_chain (tree static_chain, inline_data *id)
{
  tree *chain_copy, *pvar;

  chain_copy = &static_chain;
  for (pvar = chain_copy; *pvar; pvar = &TREE_CHAIN (*pvar))
    {
      tree new = remap_decl (*pvar, id);
      lang_hooks.dup_lang_specific_decl (new);
      TREE_CHAIN (new) = TREE_CHAIN (*pvar);
      *pvar = new;
    }
  return static_chain;
}

/* Return true if the function is allowed to be versioned.
   This is a guard for the versioning functionality.  */
bool
tree_versionable_function_p (tree fndecl)
{
  if (fndecl == NULL_TREE)
    return false;
  /* ??? There are cases where a function is
     uninlinable but can be versioned.  */
  if (!tree_inlinable_function_p (fndecl))
    return false;

  return true;
}

/* Create a copy of a function's tree.
   OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
   of the original function and the new copied function
   respectively.  In case we want to replace a DECL
   tree with another tree while duplicating the function's
   body, TREE_MAP represents the mapping between these
   trees.  */
void
tree_function_versioning (tree old_decl, tree new_decl, varray_type tree_map)
{
  struct cgraph_node *old_version_node;
  struct cgraph_node *new_version_node;
  inline_data id;
  tree p, new_fndecl;
  unsigned i;
  struct ipa_replace_map *replace_info;
  basic_block old_entry_block;
  tree t_step;

  gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
              && TREE_CODE (new_decl) == FUNCTION_DECL);
  DECL_POSSIBLY_INLINED (old_decl) = 1;

  old_version_node = cgraph_node (old_decl);
  new_version_node = cgraph_node (new_decl);

  allocate_struct_function (new_decl);
  /* cfun points to the newly allocated function struct at this point.  */
  cfun->function_end_locus = DECL_SOURCE_LOCATION (new_decl);

  DECL_ARTIFICIAL (new_decl) = 1;
  DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);

  /* Generate a new name for the new version.  */
  DECL_NAME (new_decl) = create_tmp_var_name (NULL);
  /* Create a new SYMBOL_REF rtx for the new name.  */
  if (DECL_RTL (old_decl) != NULL)
    {
      SET_DECL_RTL (new_decl, copy_rtx (DECL_RTL (old_decl)));
      XEXP (DECL_RTL (new_decl), 0) =
        gen_rtx_SYMBOL_REF (GET_MODE (XEXP (DECL_RTL (old_decl), 0)),
                            IDENTIFIER_POINTER (DECL_NAME (new_decl)));
    }

  /* Prepare the data structures for the tree copy.  */
  memset (&id, 0, sizeof (id));

  /* The new version.  */
  id.node = new_version_node;

  /* The old version.  */
  id.current_node = cgraph_node (old_decl);

  id.versioning_p = true;
  id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
  id.caller = new_decl;
  id.callee = old_decl;
  id.callee_cfun = DECL_STRUCT_FUNCTION (old_decl);

  current_function_decl = new_decl;

  /* Copy the function's static chain.  */
  p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
  if (p)
    DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
      copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
                         &id);
  /* Copy the function's arguments.  */
  if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
    DECL_ARGUMENTS (new_decl) =
      copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id);

  /* If there's a tree_map, prepare for substitution.  */
  if (tree_map)
    for (i = 0; i < VARRAY_ACTIVE_SIZE (tree_map); i++)
      {
        replace_info = VARRAY_GENERIC_PTR (tree_map, i);
        if (replace_info->replace_p && !replace_info->ref_p)
          insert_decl_map (&id, replace_info->old_tree,
                           replace_info->new_tree);
        else if (replace_info->replace_p && replace_info->ref_p)
          id.ipa_info = tree_map;
      }

  DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.callee), &id);

  /* Renumber the lexical scoping (non-code) blocks consecutively.  */
  number_blocks (id.caller);

  if (DECL_STRUCT_FUNCTION (old_decl)->unexpanded_var_list != NULL_TREE)
    /* Add local vars.  */
    for (t_step = DECL_STRUCT_FUNCTION (old_decl)->unexpanded_var_list;
         t_step; t_step = TREE_CHAIN (t_step))
      {
        tree var = TREE_VALUE (t_step);
        if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
          cfun->unexpanded_var_list = tree_cons (NULL_TREE, var,
                                                 cfun->unexpanded_var_list);
        else
          cfun->unexpanded_var_list =
            tree_cons (NULL_TREE, remap_decl (var, &id),
                       cfun->unexpanded_var_list);
      }

  /* Copy the function's body.  */
  old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
    (DECL_STRUCT_FUNCTION (old_decl));
  new_fndecl = copy_body (&id,
                          old_entry_block->count,
                          old_entry_block->frequency, NULL, NULL);

  DECL_SAVED_TREE (new_decl) = DECL_SAVED_TREE (new_fndecl);

  DECL_STRUCT_FUNCTION (new_decl)->cfg =
    DECL_STRUCT_FUNCTION (new_fndecl)->cfg;
  DECL_STRUCT_FUNCTION (new_decl)->eh = DECL_STRUCT_FUNCTION (new_fndecl)->eh;
  DECL_STRUCT_FUNCTION (new_decl)->ib_boundaries_block =
    DECL_STRUCT_FUNCTION (new_fndecl)->ib_boundaries_block;
  DECL_STRUCT_FUNCTION (new_decl)->last_label_uid =
    DECL_STRUCT_FUNCTION (new_fndecl)->last_label_uid;

  if (DECL_RESULT (old_decl) != NULL_TREE)
    {
      tree *res_decl = &DECL_RESULT (old_decl);
      DECL_RESULT (new_decl) = remap_decl (*res_decl, &id);
      lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
    }

  current_function_decl = NULL;
  /* Renumber the lexical scoping (non-code) blocks consecutively.  */
  number_blocks (new_decl);

  /* Clean up.  */
  splay_tree_delete (id.decl_map);
  fold_cond_expr_cond ();
  return;
}
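
/* Usage sketch (editor's addition, hypothetical caller): to version FN
   with a parameter replaced by the constant 5, a pass could fill
   TREE_MAP roughly like so before calling tree_function_versioning:

     struct ipa_replace_map *map = xmalloc (sizeof (*map));
     map->old_tree = DECL_ARGUMENTS (fn);            // parm to replace
     map->new_tree = build_int_cst (integer_type_node, 5);
     map->replace_p = true;
     map->ref_p = false;                             // direct replacement
     VARRAY_PUSH_GENERIC_PTR (tree_map, map);

   Entries with ref_p set are not entered in the decl_map; they are
   instead handled by replace_ref_tree below, which rewrites
   INDIRECT_REFs of old_tree during the body copy.  */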

/* Replace an INDIRECT_REF of a given DECL tree with the replacement
   tree recorded in ID->ipa_info.  TP points to the INDIRECT_REF tree.
   Return true if a replacement was made.  */
static bool
replace_ref_tree (inline_data *id, tree *tp)
{
  bool replaced = false;
  tree new;

  if (id->ipa_info && VARRAY_ACTIVE_SIZE (id->ipa_info) > 0)
    {
      unsigned i;

      for (i = 0; i < VARRAY_ACTIVE_SIZE (id->ipa_info); i++)
        {
          struct ipa_replace_map *replace_info;
          replace_info = VARRAY_GENERIC_PTR (id->ipa_info, i);

          if (replace_info->replace_p && replace_info->ref_p)
            {
              tree old_tree = replace_info->old_tree;
              tree new_tree = replace_info->new_tree;

              if (TREE_CODE (*tp) == INDIRECT_REF
                  && TREE_OPERAND (*tp, 0) == old_tree)
                {
                  new = copy_node (new_tree);
                  *tp = new;
                  replaced = true;
                }
            }
        }
    }
  return replaced;
}

/* Return true if we are inlining.  */
static inline bool
inlining_p (inline_data *id)
{
  return (!id->saving_p && !id->cloning_p && !id->versioning_p);
}

/* Duplicate a type, fields and all.  */

tree
build_duplicate_type (tree type)
{
  inline_data id;

  memset (&id, 0, sizeof (id));
  id.callee = current_function_decl;
  id.caller = current_function_decl;
  id.callee_cfun = cfun;
  id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);

  type = remap_type_1 (type, &id);

  splay_tree_delete (id.decl_map);

  return type;
}
