OpenCores Subversion repository "openrisc"
URL: https://opencores.org/ocsvn/openrisc/openrisc/trunk
File: openrisc/trunk/gnu-old/gcc-4.2.2/gcc/tree-tailcall.c (rev 847)
/* Tail call optimization on trees.
   Copyright (C) 2003, 2004, 2005, 2007 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "function.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "diagnostic.h"
#include "except.h"
#include "tree-pass.h"
#include "flags.h"
#include "langhooks.h"

/* This file implements tail recursion elimination.  It is also used to
   analyze tail calls in general, passing the results to the rtl level
   where they are used for sibcall optimization.

   In addition to the standard tail recursion elimination, we handle the most
   trivial cases of making the call tail recursive by creating accumulators.
   For example, the following function

   int sum (int n)
   {
     if (n > 0)
       return n + sum (n - 1);
     else
       return 0;
   }

   is transformed into

   int sum (int n)
   {
     int acc = 0;

     while (n > 0)
       acc += n--;

     return acc;
   }

   To do this, we maintain two accumulators (a_acc and m_acc) such that
   when we reach a return x statement, we return a_acc + x * m_acc
   instead.  They are initialized to 0 and 1, respectively, so the
   semantics of the function are obviously preserved.  If we can guarantee
   that the value of an accumulator never changes, we omit that accumulator.

   There are three ways the function may exit.  The first one is
   handled in adjust_return_value, the other two in adjust_accumulator_values
   (the second case is actually a special case of the third one and we
   present it separately just for clarity):

   1) Just return x, where x is not in any of the remaining special shapes.
      We rewrite this to a gimple equivalent of return m_acc * x + a_acc.

   2) return f (...), where f is the current function, is rewritten in a
      classical tail-recursion elimination way, into assignment of arguments
      and jump to the start of the function.  Values of the accumulators
      are unchanged.

   3) return a + m * f(...), where a and m do not depend on the call to f.
      To preserve the semantics described above, we want this to be rewritten
      in such a way that we finally return

      a_acc + (a + m * f(...)) * m_acc = (a_acc + a * m_acc) + (m * m_acc) * f(...).

      I.e. we increase a_acc by a * m_acc, multiply m_acc by m and
      eliminate the tail call to f.  The special cases where the value is just
      added or just multiplied are obtained by setting a = 0 or m = 1.

   TODO -- it is possible to do similar tricks for other operations.  */
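
/* As a concrete instance of case 3: if the function contains

     return 2 + 3 * f (n - 1);

   then, following the rule above, we set a_acc += 2 * m_acc and
   m_acc *= 3 and eliminate the call.  When a plain "return x" is
   eventually reached, the rewritten "return a_acc + x * m_acc" yields
   a_acc + (2 + 3 * x) * m_acc, which is exactly the value the original
   return would have produced.  */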

/* A structure that describes a tail call.  */

struct tailcall
{
  /* The block in which the call occurs.  */
  basic_block call_block;

  /* The iterator pointing to the call statement.  */
  block_stmt_iterator call_bsi;

  /* True if it is a call to the current function.  */
  bool tail_recursion;

  /* The return value of the caller is mult * f + add, where f is the return
     value of the call.  */
  tree mult, add;

  /* The next tail call in the chain.  */
  struct tailcall *next;
};
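
/* For example (illustrative GIMPLE inside a function foo): for the sequence

     x = foo (i);
     y = 2 * x;
     z = y + 1;
     return z;

   the recorded struct tailcall has tail_recursion = true, mult = 2 and
   add = 1, while for a plain tail call to a different function both mult
   and add remain NULL_TREE.  */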

/* The variables holding the values of the multiplicative and additive
   accumulators.  */
static tree m_acc, a_acc;

static bool suitable_for_tail_opt_p (void);
static bool optimize_tail_call (struct tailcall *, bool);
static void eliminate_tail_call (struct tailcall *);
static void find_tail_calls (basic_block, struct tailcall **);

/* Returns false when the function is not suitable for tail call optimization
   for some reason (e.g. if it takes a variable number of arguments).  */

static bool
suitable_for_tail_opt_p (void)
{
  referenced_var_iterator rvi;
  tree var;

  if (current_function_stdarg)
    return false;

  /* No local variable or structure field should be call-clobbered.  We
     ignore any kind of memory tag, as these are not real variables.  */

  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      if (!is_global_var (var)
          && (!MTAG_P (var) || TREE_CODE (var) == STRUCT_FIELD_TAG)
          && is_call_clobbered (var))
        return false;
    }

  return true;
}
/* Returns false when the function is not suitable for tail call optimization
   for some reason (e.g. if it takes a variable number of arguments).
   This test must pass in addition to suitable_for_tail_opt_p in order to make
   tail call discovery happen.  */

static bool
suitable_for_tail_call_opt_p (void)
{
  tree param;

  /* alloca (until we have stack slot life analysis) inhibits
     sibling call optimizations, but not tail recursion.  */
  if (current_function_calls_alloca)
    return false;

  /* If we are using sjlj exceptions, we may need to add a call to
     _Unwind_SjLj_Unregister at the exit of the function, which means
     that we cannot do any sibcall transformations.  */
  if (USING_SJLJ_EXCEPTIONS && current_function_has_exception_handlers ())
    return false;

  /* Any function that calls setjmp might have longjmp called from
     any called function.  ??? We really should represent this
     properly in the CFG so that this needn't be special cased.  */
  if (current_function_calls_setjmp)
    return false;

  /* ??? It is OK if the address of an argument of a function is taken in
     some cases, but not in all cases.  See PR15387 and PR19616.  Revisit
     for 4.1.  */
  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = TREE_CHAIN (param))
    if (TREE_ADDRESSABLE (param))
      return false;

  return true;
}

/* Checks whether the expression EXPR in stmt AT is independent of the
   statement pointed to by BSI (in the sense that we already know EXPR's value
   at BSI).  We use the fact that we are only called from a chain of
   basic blocks that have only a single successor.  Returns the expression
   containing the value of EXPR at BSI.  */

static tree
independent_of_stmt_p (tree expr, tree at, block_stmt_iterator bsi)
{
  basic_block bb, call_bb, at_bb;
  edge e;
  edge_iterator ei;

  if (is_gimple_min_invariant (expr))
    return expr;

  if (TREE_CODE (expr) != SSA_NAME)
    return NULL_TREE;

  /* Mark the blocks in the chain leading to the end.  */
  at_bb = bb_for_stmt (at);
  call_bb = bb_for_stmt (bsi_stmt (bsi));
  for (bb = call_bb; bb != at_bb; bb = single_succ (bb))
    bb->aux = &bb->aux;
  bb->aux = &bb->aux;

  while (1)
    {
      at = SSA_NAME_DEF_STMT (expr);
      bb = bb_for_stmt (at);

      /* EXPR is the default definition or is defined before the chain.  */
      if (!bb || !bb->aux)
        break;

      if (bb == call_bb)
        {
          for (; !bsi_end_p (bsi); bsi_next (&bsi))
            if (bsi_stmt (bsi) == at)
              break;

          if (!bsi_end_p (bsi))
            expr = NULL_TREE;
          break;
        }

      if (TREE_CODE (at) != PHI_NODE)
        {
          expr = NULL_TREE;
          break;
        }

      FOR_EACH_EDGE (e, ei, bb->preds)
        if (e->src->aux)
          break;
      gcc_assert (e);

      expr = PHI_ARG_DEF_FROM_EDGE (at, e);
      if (TREE_CODE (expr) != SSA_NAME)
        {
          /* The value is a constant.  */
          break;
        }
    }

  /* Unmark the blocks.  */
  for (bb = call_bb; bb != at_bb; bb = single_succ (bb))
    bb->aux = NULL;
  bb->aux = NULL;

  return expr;
}

/* Simulates the effect of the assignment ASS in STMT on the return value
   of the tail recursive CALL passed in ASS_VAR.  M and A are the
   multiplicative and additive factors for the real return value.  */

static bool
process_assignment (tree ass, tree stmt, block_stmt_iterator call, tree *m,
                    tree *a, tree *ass_var)
{
  tree op0, op1, non_ass_var;
  tree dest = TREE_OPERAND (ass, 0);
  tree src = TREE_OPERAND (ass, 1);
  enum tree_code code = TREE_CODE (src);
  tree src_var = src;

  /* See if this is a simple copy operation of an SSA name to the function
     result.  In that case we may have a simple tail call.  Ignore type
     conversions that can never produce extra code between the function
     call and the function return.  */
  STRIP_NOPS (src_var);
  if (TREE_CODE (src_var) == SSA_NAME)
    {
      if (src_var != *ass_var)
        return false;

      *ass_var = dest;
      return true;
    }

  if (TREE_CODE_CLASS (code) != tcc_binary)
    return false;

  /* Accumulator optimizations will reverse the order of operations.
     We can only do that for floating-point types if we're assuming
     that addition and multiplication are associative.  */
  if (!flag_unsafe_math_optimizations)
    if (FLOAT_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
      return false;

  /* We only handle code like

     x = call ();
     y = m * x;
     z = y + a;
     return z;

     TODO -- Extend it for cases where the linear transformation of the output
     is expressed in a more complicated way.  */

  op0 = TREE_OPERAND (src, 0);
  op1 = TREE_OPERAND (src, 1);

  if (op0 == *ass_var
      && (non_ass_var = independent_of_stmt_p (op1, stmt, call)))
    ;
  else if (op1 == *ass_var
           && (non_ass_var = independent_of_stmt_p (op0, stmt, call)))
    ;
  else
    return false;

  switch (code)
    {
    case PLUS_EXPR:
      /* There should be no previous addition.  TODO -- it should be fairly
         straightforward to lift this restriction -- just allow storing
         more complicated expressions in *A, and gimplify it in
         adjust_accumulator_values.  */
      if (*a)
        return false;
      *a = non_ass_var;
      *ass_var = dest;
      return true;

    case MULT_EXPR:
      /* A similar remark applies here.  Handling multiplication after
         addition is just slightly more complicated -- we need to multiply
         both *A and *M.  */
      if (*a || *m)
        return false;
      *m = non_ass_var;
      *ass_var = dest;
      return true;

      /* TODO -- Handle other codes (NEGATE_EXPR, MINUS_EXPR).  */

    default:
      return false;
    }
}
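
/* To trace the pattern from the comment above (names are illustrative):
   starting with *ass_var == x and *m == *a == NULL_TREE, the statement
   "y = m * x" takes the MULT_EXPR case and sets *m = m, *ass_var = y;
   "z = y + a" then takes the PLUS_EXPR case and sets *a = a, *ass_var = z,
   so the final "return z" is recognized as returning m * x + a.  Note that
   a multiplication is only accepted while *a is still NULL, i.e. before
   any addition has been seen.  */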

/* Propagate VAR through phis on edge E.  */

static tree
propagate_through_phis (tree var, edge e)
{
  basic_block dest = e->dest;
  tree phi;

  for (phi = phi_nodes (dest); phi; phi = PHI_CHAIN (phi))
    if (PHI_ARG_DEF_FROM_EDGE (phi, e) == var)
      return PHI_RESULT (phi);

  return var;
}

/* Finds tail calls falling into basic block BB.  The list of found tail
   calls is added to the start of RET.  */

static void
find_tail_calls (basic_block bb, struct tailcall **ret)
{
  tree ass_var, ret_var, stmt, func, param, args, call = NULL_TREE;
  block_stmt_iterator bsi, absi;
  bool tail_recursion;
  struct tailcall *nw;
  edge e;
  tree m, a;
  basic_block abb;
  stmt_ann_t ann;

  if (!single_succ_p (bb))
    return;

  for (bsi = bsi_last (bb); !bsi_end_p (bsi); bsi_prev (&bsi))
    {
      stmt = bsi_stmt (bsi);

      /* Ignore labels.  */
      if (TREE_CODE (stmt) == LABEL_EXPR)
        continue;

      /* Check for a call.  */
      if (TREE_CODE (stmt) == MODIFY_EXPR)
        {
          ass_var = TREE_OPERAND (stmt, 0);
          call = TREE_OPERAND (stmt, 1);
          if (TREE_CODE (call) == WITH_SIZE_EXPR)
            call = TREE_OPERAND (call, 0);
        }
      else
        {
          ass_var = NULL_TREE;
          call = stmt;
        }

      if (TREE_CODE (call) == CALL_EXPR)
        break;

      /* If the statement has virtual or volatile operands, fail.  */
      ann = stmt_ann (stmt);
      if (!ZERO_SSA_OPERANDS (stmt, (SSA_OP_VUSE | SSA_OP_VIRTUAL_DEFS))
          || ann->has_volatile_ops)
        return;
    }

  if (bsi_end_p (bsi))
    {
      edge_iterator ei;
      /* Recurse to the predecessors.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
        find_tail_calls (e->src, ret);

      return;
    }

  /* We found the call, check whether it is suitable.  */
  tail_recursion = false;
  func = get_callee_fndecl (call);
  if (func == current_function_decl)
    {
      for (param = DECL_ARGUMENTS (func), args = TREE_OPERAND (call, 1);
           param && args;
           param = TREE_CHAIN (param), args = TREE_CHAIN (args))
        {
          tree arg = TREE_VALUE (args);
          if (param != arg)
            {
              /* Make sure there are no problems with copying.  The parameter
                 must have a copyable type and the two arguments must have
                 reasonably equivalent types.  The latter requirement could be
                 relaxed if we emitted a suitable type conversion statement.  */
              if (!is_gimple_reg_type (TREE_TYPE (param))
                  || !lang_hooks.types_compatible_p (TREE_TYPE (param),
                                                     TREE_TYPE (arg)))
                break;

              /* The parameter should be a real operand, so that the phi node
                 created for it at the start of the function has the meaning
                 of copying the value.  This test implies is_gimple_reg_type
                 from the previous condition; however, this one could be
                 relaxed by being more careful with copying the new value
                 of the parameter (emitting an appropriate MODIFY_EXPR and
                 updating the virtual operands).  */
              if (!is_gimple_reg (param))
                break;
            }
        }
      if (!args && !param)
        tail_recursion = true;
    }

  /* Now check the statements after the call.  None of them has virtual
     operands, so they may only depend on the call through its return
     value.  The return value should also be dependent on each of them,
     since we are running after dce.  */
  m = NULL_TREE;
  a = NULL_TREE;

  abb = bb;
  absi = bsi;
  while (1)
    {
      bsi_next (&absi);

      while (bsi_end_p (absi))
        {
          ass_var = propagate_through_phis (ass_var, single_succ_edge (abb));
          abb = single_succ (abb);
          absi = bsi_start (abb);
        }

      stmt = bsi_stmt (absi);

      if (TREE_CODE (stmt) == LABEL_EXPR)
        continue;

      if (TREE_CODE (stmt) == RETURN_EXPR)
        break;

      if (TREE_CODE (stmt) != MODIFY_EXPR)
        return;

      if (!process_assignment (stmt, stmt, bsi, &m, &a, &ass_var))
        return;
    }

  /* See if this is a tail call we can handle.  */
  ret_var = TREE_OPERAND (stmt, 0);
  if (ret_var
      && TREE_CODE (ret_var) == MODIFY_EXPR)
    {
      tree ret_op = TREE_OPERAND (ret_var, 1);
      STRIP_NOPS (ret_op);
      if (!tail_recursion
          && TREE_CODE (ret_op) != SSA_NAME)
        return;

      if (!process_assignment (ret_var, stmt, bsi, &m, &a, &ass_var))
        return;
      ret_var = TREE_OPERAND (ret_var, 0);
    }

  /* We may proceed if there is either no return value, or the return value
     is identical to the call's return.  */
  if (ret_var
      && (ret_var != ass_var))
    return;

  /* If this is not a tail recursive call, we cannot handle addends or
     multiplicands.  */
  if (!tail_recursion && (m || a))
    return;

  nw = XNEW (struct tailcall);

  nw->call_block = bb;
  nw->call_bsi = bsi;

  nw->tail_recursion = tail_recursion;

  nw->mult = m;
  nw->add = a;

  nw->next = *ret;
  *ret = nw;
}
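
/* For example (illustrative GIMPLE inside a function foo), a block ending
   with

     x = foo (i);
     y = x + b;
     return y;

   is recorded with tail_recursion = true and add = b, provided b is
   independent of the call in the sense of independent_of_stmt_p.  For a
   call to a different function the candidate survives only if no addend
   or multiplicand was accumulated, as enforced by the check above.  */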

/* Adjust the accumulator values according to A and M after BSI, and update
   the phi nodes on edge BACK.  */

static void
adjust_accumulator_values (block_stmt_iterator bsi, tree m, tree a, edge back)
{
  tree stmt, var, phi, tmp;
  tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
  tree a_acc_arg = a_acc, m_acc_arg = m_acc;

  if (a)
    {
      if (m_acc)
        {
          if (integer_onep (a))
            var = m_acc;
          else
            {
              stmt = build2 (MODIFY_EXPR, ret_type, NULL_TREE,
                             build2 (MULT_EXPR, ret_type, m_acc, a));

              tmp = create_tmp_var (ret_type, "acc_tmp");
              add_referenced_var (tmp);

              var = make_ssa_name (tmp, stmt);
              TREE_OPERAND (stmt, 0) = var;
              bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
            }
        }
      else
        var = a;

      stmt = build2 (MODIFY_EXPR, ret_type, NULL_TREE,
                     build2 (PLUS_EXPR, ret_type, a_acc, var));
      var = make_ssa_name (SSA_NAME_VAR (a_acc), stmt);
      TREE_OPERAND (stmt, 0) = var;
      bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
      a_acc_arg = var;
    }

  if (m)
    {
      stmt = build2 (MODIFY_EXPR, ret_type, NULL_TREE,
                     build2 (MULT_EXPR, ret_type, m_acc, m));
      var = make_ssa_name (SSA_NAME_VAR (m_acc), stmt);
      TREE_OPERAND (stmt, 0) = var;
      bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
      m_acc_arg = var;
    }

  if (a_acc)
    {
      for (phi = phi_nodes (back->dest); phi; phi = PHI_CHAIN (phi))
        if (PHI_RESULT (phi) == a_acc)
          break;

      add_phi_arg (phi, a_acc_arg, back);
    }

  if (m_acc)
    {
      for (phi = phi_nodes (back->dest); phi; phi = PHI_CHAIN (phi))
        if (PHI_RESULT (phi) == m_acc)
          break;

      add_phi_arg (phi, m_acc_arg, back);
    }
}

/* Adjust the return value at the end of BB according to the M and A
   accumulators.  */

static void
adjust_return_value (basic_block bb, tree m, tree a)
{
  tree ret_stmt = last_stmt (bb), ret_var, var, stmt, tmp;
  tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
  block_stmt_iterator bsi = bsi_last (bb);

  gcc_assert (TREE_CODE (ret_stmt) == RETURN_EXPR);

  ret_var = TREE_OPERAND (ret_stmt, 0);
  if (!ret_var)
    return;

  if (TREE_CODE (ret_var) == MODIFY_EXPR)
    {
      ret_var->common.ann = (tree_ann_t) stmt_ann (ret_stmt);
      bsi_replace (&bsi, ret_var, true);
      SSA_NAME_DEF_STMT (TREE_OPERAND (ret_var, 0)) = ret_var;
      ret_var = TREE_OPERAND (ret_var, 0);
      ret_stmt = build1 (RETURN_EXPR, TREE_TYPE (ret_stmt), ret_var);
      bsi_insert_after (&bsi, ret_stmt, BSI_NEW_STMT);
    }

  if (m)
    {
      stmt = build2 (MODIFY_EXPR, ret_type, NULL_TREE,
                     build2 (MULT_EXPR, ret_type, m_acc, ret_var));

      tmp = create_tmp_var (ret_type, "acc_tmp");
      add_referenced_var (tmp);

      var = make_ssa_name (tmp, stmt);
      TREE_OPERAND (stmt, 0) = var;
      bsi_insert_before (&bsi, stmt, BSI_SAME_STMT);
    }
  else
    var = ret_var;

  if (a)
    {
      stmt = build2 (MODIFY_EXPR, ret_type, NULL_TREE,
                     build2 (PLUS_EXPR, ret_type, a_acc, var));

      tmp = create_tmp_var (ret_type, "acc_tmp");
      add_referenced_var (tmp);

      var = make_ssa_name (tmp, stmt);
      TREE_OPERAND (stmt, 0) = var;
      bsi_insert_before (&bsi, stmt, BSI_SAME_STMT);
    }

  TREE_OPERAND (ret_stmt, 0) = var;
  update_stmt (ret_stmt);
}
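
/* Illustration of the rewrite performed above (the temporary names are
   made up; the pass derives them from "acc_tmp"): when both accumulators
   are live, a plain

     return x;

   becomes

     acc_tmp.1 = m_acc * x;
     acc_tmp.2 = a_acc + acc_tmp.1;
     return acc_tmp.2;

   i.e. the gimple equivalent of "return m_acc * x + a_acc" described as
   case 1 in the overview comment at the top of the file.  */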

/* Subtract COUNT and FREQUENCY from the basic block and its
   outgoing edge.  */
static void
decrease_profile (basic_block bb, gcov_type count, int frequency)
{
  edge e;
  bb->count -= count;
  if (bb->count < 0)
    bb->count = 0;
  bb->frequency -= frequency;
  if (bb->frequency < 0)
    bb->frequency = 0;
  if (!single_succ_p (bb))
    {
      gcc_assert (!EDGE_COUNT (bb->succs));
      return;
    }
  e = single_succ_edge (bb);
  e->count -= count;
  if (e->count < 0)
    e->count = 0;
}

/* Returns true if argument PARAM of the tail recursive call needs to be copied
   when the call is eliminated.  */

static bool
arg_needs_copy_p (tree param)
{
  tree def;

  if (!is_gimple_reg (param) || !var_ann (param))
    return false;

  /* Parameters that are only defined but never used need not be copied.  */
  def = default_def (param);
  if (!def)
    return false;

  return true;
}

/* Eliminates the tail call described by T.  */

static void
eliminate_tail_call (struct tailcall *t)
{
  tree param, stmt, args, rslt, call;
  basic_block bb, first;
  edge e;
  tree phi;
  block_stmt_iterator bsi;
  tree orig_stmt;

  stmt = orig_stmt = bsi_stmt (t->call_bsi);
  bb = t->call_block;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Eliminated tail recursion in bb %d : ",
               bb->index);
      print_generic_stmt (dump_file, stmt, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  if (TREE_CODE (stmt) == MODIFY_EXPR)
    stmt = TREE_OPERAND (stmt, 1);

  first = single_succ (ENTRY_BLOCK_PTR);

  /* Remove the code after call_bsi that will become unreachable.  The
     possibly unreachable code in other blocks is removed later in
     cfg cleanup.  */
  bsi = t->call_bsi;
  bsi_next (&bsi);
  while (!bsi_end_p (bsi))
    {
      tree t = bsi_stmt (bsi);
      /* Do not remove the return statement, so that redirect_edge_and_branch
         sees how the block ends.  */
      if (TREE_CODE (t) == RETURN_EXPR)
        break;

      bsi_remove (&bsi, true);
      release_defs (t);
    }

  /* The number of executions of the function has been reduced by the
     tail call.  */
  e = single_succ_edge (t->call_block);
  decrease_profile (EXIT_BLOCK_PTR, e->count, EDGE_FREQUENCY (e));
  decrease_profile (ENTRY_BLOCK_PTR, e->count, EDGE_FREQUENCY (e));
  if (e->dest != EXIT_BLOCK_PTR)
    decrease_profile (e->dest, e->count, EDGE_FREQUENCY (e));

  /* Replace the call by a jump to the start of the function.  */
  e = redirect_edge_and_branch (single_succ_edge (t->call_block), first);
  gcc_assert (e);
  PENDING_STMT (e) = NULL_TREE;

  /* Add phi node entries for arguments.  The ordering of the phi nodes should
     be the same as the ordering of the arguments.  */
  for (param = DECL_ARGUMENTS (current_function_decl),
       args = TREE_OPERAND (stmt, 1),
       phi = phi_nodes (first);
       param;
       param = TREE_CHAIN (param),
       args = TREE_CHAIN (args))
    {
      if (!arg_needs_copy_p (param))
        continue;
      gcc_assert (param == SSA_NAME_VAR (PHI_RESULT (phi)));

      add_phi_arg (phi, TREE_VALUE (args), e);
      phi = PHI_CHAIN (phi);
    }

  /* Update the values of accumulators.  */
  adjust_accumulator_values (t->call_bsi, t->mult, t->add, e);

  call = bsi_stmt (t->call_bsi);
  if (TREE_CODE (call) == MODIFY_EXPR)
    {
      rslt = TREE_OPERAND (call, 0);

      /* The result of the call will no longer be defined, so adjust the
         SSA_NAME_DEF_STMT accordingly.  */
      SSA_NAME_DEF_STMT (rslt) = build_empty_stmt ();
    }

  bsi_remove (&t->call_bsi, true);
  release_defs (call);
}
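
/* Schematically (illustrative), for

     int foo (int n) { ... return foo (n - 1); ... }

   the outgoing edge of the block containing the recursive call is
   redirected to the first block of the function body, n - 1 is added as
   the phi argument for the parameter n on that edge, the accumulators are
   updated, and the call statement itself is removed, so the recursion
   becomes a jump back to the start of the function.  */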

/* Add phi nodes for the virtual operands defined in the function to the
   header of the loop created by tail recursion elimination.

   Originally, we used to add phi nodes only for call-clobbered variables,
   as the value of the non-call-clobbered ones obviously cannot be used
   or changed within the recursive call.  However, the local variables
   from multiple calls now share the same location, so the virtual ssa form
   requires us to say that the location dies on further iterations of the loop,
   which requires adding phi nodes.  */
static void
add_virtual_phis (void)
{
  referenced_var_iterator rvi;
  tree var;

  /* The problematic part is that there is no way to know what to put into
     the phi nodes (in fact, such an ssa name does not even have to be
     available).  A solution would be to have an artificial use/kill for all
     virtual operands in the EXIT node.  Unless we have this, we cannot do
     much better than to rebuild the ssa form for the possibly affected
     virtual ssa names from scratch.  */

  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      if (!is_gimple_reg (var) && default_def (var) != NULL_TREE)
        mark_sym_for_renaming (var);
    }

  update_ssa (TODO_update_ssa_only_virtuals);
}

/* Optimizes the tail call described by T.  If OPT_TAILCALLS is true, also
   marks the tail call for the sibcall optimization.  */

static bool
optimize_tail_call (struct tailcall *t, bool opt_tailcalls)
{
  if (t->tail_recursion)
    {
      eliminate_tail_call (t);
      return true;
    }

  if (opt_tailcalls)
    {
      tree stmt = bsi_stmt (t->call_bsi);

      stmt = get_call_expr_in (stmt);
      CALL_EXPR_TAILCALL (stmt) = 1;
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Found tail call ");
          print_generic_expr (dump_file, stmt, dump_flags);
          fprintf (dump_file, " in bb %i\n", t->call_block->index);
        }
    }

  return false;
}

/* Optimizes tail calls in the function, turning the tail recursion
   into iteration.  */

static void
tree_optimize_tail_calls_1 (bool opt_tailcalls)
{
  edge e;
  bool phis_constructed = false;
  struct tailcall *tailcalls = NULL, *act, *next;
  bool changed = false;
  basic_block first = single_succ (ENTRY_BLOCK_PTR);
  tree stmt, param, ret_type, tmp, phi;
  edge_iterator ei;

  if (!suitable_for_tail_opt_p ())
    return;
  if (opt_tailcalls)
    opt_tailcalls = suitable_for_tail_call_opt_p ();

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    {
      /* Only traverse the normal exits, i.e. those that end with a return
         statement.  */
      stmt = last_stmt (e->src);

      if (stmt
          && TREE_CODE (stmt) == RETURN_EXPR)
        find_tail_calls (e->src, &tailcalls);
    }

  /* Construct the phi nodes and accumulators if necessary.  */
  a_acc = m_acc = NULL_TREE;
  for (act = tailcalls; act; act = act->next)
    {
      if (!act->tail_recursion)
        continue;

      if (!phis_constructed)
        {
          /* Ensure that there is only one predecessor of the block.  */
          if (!single_pred_p (first))
            first = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));

          /* Copy the args if needed.  */
          for (param = DECL_ARGUMENTS (current_function_decl);
               param;
               param = TREE_CHAIN (param))
            if (arg_needs_copy_p (param))
              {
                tree name = default_def (param);
                tree new_name = make_ssa_name (param, SSA_NAME_DEF_STMT (name));
                tree phi;

                set_default_def (param, new_name);
                phi = create_phi_node (name, first);
                SSA_NAME_DEF_STMT (name) = phi;
                add_phi_arg (phi, new_name, single_pred_edge (first));
              }
          phis_constructed = true;
        }

      if (act->add && !a_acc)
        {
          ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));

          tmp = create_tmp_var (ret_type, "add_acc");
          add_referenced_var (tmp);

          phi = create_phi_node (tmp, first);
          add_phi_arg (phi,
                       /* RET_TYPE can be a float when -ffast-math is
                          enabled.  */
                       fold_convert (ret_type, integer_zero_node),
                       single_pred_edge (first));
          a_acc = PHI_RESULT (phi);
        }

      if (act->mult && !m_acc)
        {
          ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));

          tmp = create_tmp_var (ret_type, "mult_acc");
          add_referenced_var (tmp);

          phi = create_phi_node (tmp, first);
          add_phi_arg (phi,
                       /* RET_TYPE can be a float when -ffast-math is
                          enabled.  */
                       fold_convert (ret_type, integer_one_node),
                       single_pred_edge (first));
          m_acc = PHI_RESULT (phi);
        }
    }

  if (phis_constructed)
    {
      /* Reverse the order of the phi nodes, so that it matches the order
         of the function's parameters, as assumed by eliminate_tail_call.  */
      set_phi_nodes (first, phi_reverse (phi_nodes (first)));
    }

  for (; tailcalls; tailcalls = next)
    {
      next = tailcalls->next;
      changed |= optimize_tail_call (tailcalls, opt_tailcalls);
      free (tailcalls);
    }

  if (a_acc || m_acc)
    {
      /* Modify the remaining return statements.  */
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
        {
          stmt = last_stmt (e->src);

          if (stmt
              && TREE_CODE (stmt) == RETURN_EXPR)
            adjust_return_value (e->src, m_acc, a_acc);
        }
    }

  if (changed)
    {
      free_dominance_info (CDI_DOMINATORS);
      cleanup_tree_cfg ();
    }

  if (phis_constructed)
    add_virtual_phis ();
}
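
/* In outline, the driver above proceeds in five steps: collect candidates
   from the predecessors of the exit block, create the parameter and
   accumulator phi nodes in the first body block, run optimize_tail_call
   on each candidate, rewrite the remaining return statements via
   adjust_return_value, and finally clean up the CFG and the virtual SSA
   form.  */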

static unsigned int
execute_tail_recursion (void)
{
  tree_optimize_tail_calls_1 (false);
  return 0;
}

static bool
gate_tail_calls (void)
{
  return flag_optimize_sibling_calls != 0;
}

static unsigned int
execute_tail_calls (void)
{
  tree_optimize_tail_calls_1 (true);
  return 0;
}

struct tree_opt_pass pass_tail_recursion =
{
  "tailr",                              /* name */
  gate_tail_calls,                      /* gate */
  execute_tail_recursion,               /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,     /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa,     /* todo_flags_finish */

};

struct tree_opt_pass pass_tail_calls =
{
  "tailc",                              /* name */
  gate_tail_calls,                      /* gate */
  execute_tail_calls,                   /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,     /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa,     /* todo_flags_finish */

};
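
/* Note that both pass descriptors above share gate_tail_calls, so "tailr"
   (which calls tree_optimize_tail_calls_1 with opt_tailcalls == false and
   only turns tail recursion into iteration) and "tailc" (which additionally
   marks CALL_EXPR_TAILCALL for the RTL sibcall pass) run only when
   -foptimize-sibling-calls is enabled.  */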
