gcc-4.5.1/gcc/tree-stdarg.c (OpenCores openrisc_me Subversion repository, trunk, rev 280)

/* Pass computing data for optimizing stdarg functions.
   Copyright (C) 2004, 2005, 2007, 2008, 2009 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "function.h"
#include "langhooks.h"
#include "diagnostic.h"
#include "target.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-stdarg.h"

/* A simple pass that attempts to optimize stdarg functions on architectures
   that need to save register arguments to stack on entry to stdarg functions.
   If the function doesn't use any va_start macros, no registers need to
   be saved.  If va_start macros are used and the va_list variables don't
   escape the function, it is only necessary to save the registers that will
   be used in va_arg macros.  E.g. if va_arg is only used with integral types
   in the function, floating point registers don't need to be saved, etc.  */
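
/* For example, in a typical varargs function such as

     int
     sum_ints (int count, ...)
     {
       va_list ap;
       int i, total = 0;

       va_start (ap, count);
       for (i = 0; i < count; i++)
         total += va_arg (ap, int);
       va_end (ap);
       return total;
     }

   every va_arg use has integral type, so on targets that track general
   purpose and floating point argument registers separately this pass can
   determine that no floating point argument registers need to be saved
   by the prologue.  */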


/* Return true if basic block VA_ARG_BB is dominated by VA_START_BB and
   is executed at most as many times as VA_START_BB.  */

static bool
reachable_at_most_once (basic_block va_arg_bb, basic_block va_start_bb)
{
  VEC (edge, heap) *stack = NULL;
  edge e;
  edge_iterator ei;
  sbitmap visited;
  bool ret;

  if (va_arg_bb == va_start_bb)
    return true;

  if (! dominated_by_p (CDI_DOMINATORS, va_arg_bb, va_start_bb))
    return false;

  visited = sbitmap_alloc (last_basic_block);
  sbitmap_zero (visited);
  ret = true;

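  /* Walk backwards from VA_ARG_BB through its predecessors.  If the walk
     can reach VA_ARG_BB again, or has to cross an abnormal/EH edge, before
     hitting VA_START_BB, then VA_ARG_BB may execute more often than
     VA_START_BB and we give up.  */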
  FOR_EACH_EDGE (e, ei, va_arg_bb->preds)
    VEC_safe_push (edge, heap, stack, e);

  while (! VEC_empty (edge, stack))
    {
      basic_block src;

      e = VEC_pop (edge, stack);
      src = e->src;

      if (e->flags & EDGE_COMPLEX)
        {
          ret = false;
          break;
        }

      if (src == va_start_bb)
        continue;

      /* va_arg_bb can be executed more times than va_start_bb.  */
      if (src == va_arg_bb)
        {
          ret = false;
          break;
        }

      gcc_assert (src != ENTRY_BLOCK_PTR);

      if (! TEST_BIT (visited, src->index))
        {
          SET_BIT (visited, src->index);
          FOR_EACH_EDGE (e, ei, src->preds)
            VEC_safe_push (edge, heap, stack, e);
        }
    }

  VEC_free (edge, heap, stack);
  sbitmap_free (visited);
  return ret;
}


/* For statement COUNTER = RHS, if RHS is COUNTER + constant,
   return constant, otherwise return (unsigned HOST_WIDE_INT) -1.
   GPR_P is true if this is a GPR counter.  */
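
/* For example, on a target whose va_list keeps the number of general
   purpose register bytes already consumed in a counter field (called
   gp_offset here purely for illustration), the gimplified va_arg sequence
   typically looks like

     D.1 = ap.gp_offset;
     D.2 = D.1 + 8;
     ap.gp_offset = D.2;

   and for the final statement, with COUNTER == ap.gp_offset and
   RHS == D.2, walking the SSA definition chain below yields 8.  */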

static unsigned HOST_WIDE_INT
va_list_counter_bump (struct stdarg_info *si, tree counter, tree rhs,
                      bool gpr_p)
{
  tree lhs, orig_lhs;
  gimple stmt;
  unsigned HOST_WIDE_INT ret = 0, val, counter_val;
  unsigned int max_size;

  if (si->offsets == NULL)
    {
      unsigned int i;

      si->offsets = XNEWVEC (int, num_ssa_names);
      for (i = 0; i < num_ssa_names; ++i)
        si->offsets[i] = -1;
    }

  counter_val = gpr_p ? cfun->va_list_gpr_size : cfun->va_list_fpr_size;
  max_size = gpr_p ? VA_LIST_MAX_GPR_SIZE : VA_LIST_MAX_FPR_SIZE;
  orig_lhs = lhs = rhs;
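  /* First pass: follow the SSA definition chain from RHS back towards
     COUNTER, looking through casts and copies and accumulating in RET the
     constants added along the way; bail out with -1 on anything
     unrecognized.  */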
  while (lhs)
    {
      enum tree_code rhs_code;

      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
        {
          if (counter_val >= max_size)
            {
              ret = max_size;
              break;
            }

          ret -= counter_val - si->offsets[SSA_NAME_VERSION (lhs)];
          break;
        }

      stmt = SSA_NAME_DEF_STMT (lhs);

      if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != lhs)
        return (unsigned HOST_WIDE_INT) -1;

      rhs_code = gimple_assign_rhs_code (stmt);
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
           || gimple_assign_cast_p (stmt))
          && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
        {
          lhs = gimple_assign_rhs1 (stmt);
          continue;
        }

      if ((rhs_code == POINTER_PLUS_EXPR
           || rhs_code == PLUS_EXPR)
          && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
          && host_integerp (gimple_assign_rhs2 (stmt), 1))
        {
          ret += tree_low_cst (gimple_assign_rhs2 (stmt), 1);
          lhs = gimple_assign_rhs1 (stmt);
          continue;
        }

      if (get_gimple_rhs_class (rhs_code) != GIMPLE_SINGLE_RHS)
        return (unsigned HOST_WIDE_INT) -1;

      rhs = gimple_assign_rhs1 (stmt);
      if (TREE_CODE (counter) != TREE_CODE (rhs))
        return (unsigned HOST_WIDE_INT) -1;

      if (TREE_CODE (counter) == COMPONENT_REF)
        {
          if (get_base_address (counter) != get_base_address (rhs)
              || TREE_CODE (TREE_OPERAND (rhs, 1)) != FIELD_DECL
              || TREE_OPERAND (counter, 1) != TREE_OPERAND (rhs, 1))
            return (unsigned HOST_WIDE_INT) -1;
        }
      else if (counter != rhs)
        return (unsigned HOST_WIDE_INT) -1;

      lhs = NULL;
    }

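  /* Second pass: walk the same chain again and cache in SI->offsets, for
     each SSA name on it, the counter value that name corresponds to, so
     that later calls (and check_all_va_list_escapes) can reuse it.  */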
  lhs = orig_lhs;
  val = ret + counter_val;
  while (lhs)
    {
      enum tree_code rhs_code;

      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
        break;

      if (val >= max_size)
        si->offsets[SSA_NAME_VERSION (lhs)] = max_size;
      else
        si->offsets[SSA_NAME_VERSION (lhs)] = val;

      stmt = SSA_NAME_DEF_STMT (lhs);

      rhs_code = gimple_assign_rhs_code (stmt);
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
           || gimple_assign_cast_p (stmt))
          && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
        {
          lhs = gimple_assign_rhs1 (stmt);
          continue;
        }

      if ((rhs_code == POINTER_PLUS_EXPR
           || rhs_code == PLUS_EXPR)
          && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
          && host_integerp (gimple_assign_rhs2 (stmt), 1))
        {
          val -= tree_low_cst (gimple_assign_rhs2 (stmt), 1);
          lhs = gimple_assign_rhs1 (stmt);
          continue;
        }

      lhs = NULL;
    }

  return ret;
}


/* Called by walk_tree to look for references to va_list variables.  */

static tree
find_va_list_reference (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
                        void *data)
{
  bitmap va_list_vars = (bitmap) ((struct walk_stmt_info *) data)->info;
  tree var = *tp;

  if (TREE_CODE (var) == SSA_NAME)
    var = SSA_NAME_VAR (var);

  if (TREE_CODE (var) == VAR_DECL
      && bitmap_bit_p (va_list_vars, DECL_UID (var)))
    return var;

  return NULL_TREE;
}


/* Helper function of va_list_counter_struct_op.  Compute
   cfun->va_list_{g,f}pr_size.  AP is a va_list GPR/FPR counter,
   if WRITE_P is true, seen in AP = VAR, otherwise seen in VAR = AP
   statement.  GPR_P is true if AP is a GPR counter, false if it is
   a FPR counter.  */

static void
va_list_counter_op (struct stdarg_info *si, tree ap, tree var, bool gpr_p,
                    bool write_p)
{
  unsigned HOST_WIDE_INT increment;

  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
          && reachable_at_most_once (si->bb, si->va_start_bb))
        si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "bb%d will %sbe executed at most once for each va_start "
                 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
                 si->va_start_bb->index);
    }

  if (write_p
      && si->compute_sizes
      && (increment = va_list_counter_bump (si, ap, var, gpr_p)) + 1 > 1)
    {
      if (gpr_p && cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
        {
          cfun->va_list_gpr_size += increment;
          return;
        }

      if (!gpr_p && cfun->va_list_fpr_size + increment < VA_LIST_MAX_FPR_SIZE)
        {
          cfun->va_list_fpr_size += increment;
          return;
        }
    }

  if (write_p || !si->compute_sizes)
    {
      if (gpr_p)
        cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      else
        cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
}


/* If AP is a va_list GPR/FPR counter, compute cfun->va_list_{g,f}pr_size.
   If WRITE_P is true, AP has been seen in an AP = VAR assignment; if WRITE_P
   is false, AP has been seen in a VAR = AP assignment.
   Return true if the AP = VAR (resp. VAR = AP) statement is a recognized
   va_arg operation that doesn't cause the va_list variable to escape the
   current function.  */
313
 
314
static bool
315
va_list_counter_struct_op (struct stdarg_info *si, tree ap, tree var,
316
                           bool write_p)
317
{
318
  tree base;
319
 
320
  if (TREE_CODE (ap) != COMPONENT_REF
321
      || TREE_CODE (TREE_OPERAND (ap, 1)) != FIELD_DECL)
322
    return false;
323
 
324
  if (TREE_CODE (var) != SSA_NAME
325
      || bitmap_bit_p (si->va_list_vars, DECL_UID (SSA_NAME_VAR (var))))
326
    return false;
327
 
328
  base = get_base_address (ap);
329
  if (TREE_CODE (base) != VAR_DECL
330
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (base)))
331
    return false;
332
 
333
  if (TREE_OPERAND (ap, 1) == va_list_gpr_counter_field)
334
    va_list_counter_op (si, ap, var, true, write_p);
335
  else if (TREE_OPERAND (ap, 1) == va_list_fpr_counter_field)
336
    va_list_counter_op (si, ap, var, false, write_p);
337
 
338
  return true;
339
}
340
 
341
 
342
/* Check for TEM = AP.  Return true if found and the caller shouldn't
343
   search for va_list references in the statement.  */
344
 
345
static bool
346
va_list_ptr_read (struct stdarg_info *si, tree ap, tree tem)
347
{
348
  if (TREE_CODE (ap) != VAR_DECL
349
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap)))
350
    return false;
351
 
352
  if (TREE_CODE (tem) != SSA_NAME
353
      || bitmap_bit_p (si->va_list_vars,
354
                       DECL_UID (SSA_NAME_VAR (tem)))
355
      || is_global_var (SSA_NAME_VAR (tem)))
356
    return false;
357
 
358
  if (si->compute_sizes < 0)
359
    {
360
      si->compute_sizes = 0;
361
      if (si->va_start_count == 1
362
          && reachable_at_most_once (si->bb, si->va_start_bb))
363
        si->compute_sizes = 1;
364
 
365
      if (dump_file && (dump_flags & TDF_DETAILS))
366
        fprintf (dump_file,
367
                 "bb%d will %sbe executed at most once for each va_start "
368
                 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
369
                 si->va_start_bb->index);
370
    }
371
 
372
  /* For void * or char * va_list types, there is just one counter.
373
     If va_arg is used in a loop, we don't know how many registers need
374
     saving.  */
375
  if (! si->compute_sizes)
376
    return false;
377
 
378
  if (va_list_counter_bump (si, ap, tem, true) == (unsigned HOST_WIDE_INT) -1)
379
    return false;
380
 
381
  /* Note the temporary, as we need to track whether it doesn't escape
382
     the current function.  */
383
  bitmap_set_bit (si->va_list_escape_vars,
384
                  DECL_UID (SSA_NAME_VAR (tem)));
385
  return true;
386
}
387
 
388
 
389
/* Check for:
390
     tem1 = AP;
391
     TEM2 = tem1 + CST;
392
     AP = TEM2;
393
   sequence and update cfun->va_list_gpr_size.  Return true if found.  */
394
 
395
static bool
396
va_list_ptr_write (struct stdarg_info *si, tree ap, tree tem2)
397
{
398
  unsigned HOST_WIDE_INT increment;
399
 
400
  if (TREE_CODE (ap) != VAR_DECL
401
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap)))
402
    return false;
403
 
404
  if (TREE_CODE (tem2) != SSA_NAME
405
      || bitmap_bit_p (si->va_list_vars, DECL_UID (SSA_NAME_VAR (tem2))))
406
    return false;
407
 
408
  if (si->compute_sizes <= 0)
409
    return false;
410
 
411
  increment = va_list_counter_bump (si, ap, tem2, true);
412
  if (increment + 1 <= 1)
413
    return false;
414
 
415
  if (cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
416
    cfun->va_list_gpr_size += increment;
417
  else
418
    cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
419
 
420
  return true;
421
}
422
 
423
 
424
/* If RHS is X, (some type *) X or X + CST for X a temporary variable
   containing the value of some va_list variable plus optionally some
   constant, either set si->va_list_escapes or add LHS to
   si->va_list_escape_vars, depending on whether LHS is a function-local
   temporary.  */
428
 
429
static void
430
check_va_list_escapes (struct stdarg_info *si, tree lhs, tree rhs)
431
{
432
  if (! POINTER_TYPE_P (TREE_TYPE (rhs)))
433
    return;
434
 
435
  if (TREE_CODE (rhs) != SSA_NAME
436
      || ! bitmap_bit_p (si->va_list_escape_vars,
437
                         DECL_UID (SSA_NAME_VAR (rhs))))
438
    return;
439
 
440
  if (TREE_CODE (lhs) != SSA_NAME || is_global_var (SSA_NAME_VAR (lhs)))
441
    {
442
      si->va_list_escapes = true;
443
      return;
444
    }
445
 
446
  if (si->compute_sizes < 0)
447
    {
448
      si->compute_sizes = 0;
449
      if (si->va_start_count == 1
450
          && reachable_at_most_once (si->bb, si->va_start_bb))
451
        si->compute_sizes = 1;
452
 
453
      if (dump_file && (dump_flags & TDF_DETAILS))
454
        fprintf (dump_file,
455
                 "bb%d will %sbe executed at most once for each va_start "
456
                 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
457
                 si->va_start_bb->index);
458
    }
459
 
460
  /* For void * or char * va_list types, there is just one counter.
461
     If va_arg is used in a loop, we don't know how many registers need
462
     saving.  */
463
  if (! si->compute_sizes)
464
    {
465
      si->va_list_escapes = true;
466
      return;
467
    }
468
 
469
  if (va_list_counter_bump (si, si->va_start_ap, lhs, true)
470
      == (unsigned HOST_WIDE_INT) -1)
471
    {
472
      si->va_list_escapes = true;
473
      return;
474
    }
475
 
476
  bitmap_set_bit (si->va_list_escape_vars,
477
                  DECL_UID (SSA_NAME_VAR (lhs)));
478
}
479
 
480
 
481
/* Check all uses of temporaries from si->va_list_escape_vars bitmap.
482
   Return true if va_list might be escaping.  */
483
 
484
static bool
485
check_all_va_list_escapes (struct stdarg_info *si)
486
{
487
  basic_block bb;
488
 
489
  FOR_EACH_BB (bb)
490
    {
491
      gimple_stmt_iterator i;
492
 
493
      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
494
        {
495
          gimple stmt = gsi_stmt (i);
496
          tree use;
497
          ssa_op_iter iter;
498
 
499
          if (is_gimple_debug (stmt))
500
            continue;
501
 
502
          FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_ALL_USES)
503
            {
504
              if (! bitmap_bit_p (si->va_list_escape_vars,
505
                                  DECL_UID (SSA_NAME_VAR (use))))
506
                continue;
507
 
508
              if (is_gimple_assign (stmt))
509
                {
510
                  tree rhs = gimple_assign_rhs1 (stmt);
511
                  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
512
 
513
                  /* x = *ap_temp;  */
514
                  if (gimple_assign_rhs_code (stmt) == INDIRECT_REF
515
                      && TREE_OPERAND (rhs, 0) == use
516
                      && TYPE_SIZE_UNIT (TREE_TYPE (rhs))
517
                      && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (rhs)), 1)
518
                      && si->offsets[SSA_NAME_VERSION (use)] != -1)
519
                    {
520
                      unsigned HOST_WIDE_INT gpr_size;
521
                      tree access_size = TYPE_SIZE_UNIT (TREE_TYPE (rhs));
522
 
523
                      gpr_size = si->offsets[SSA_NAME_VERSION (use)]
524
                                 + tree_low_cst (access_size, 1);
525
                      if (gpr_size >= VA_LIST_MAX_GPR_SIZE)
526
                        cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
527
                      else if (gpr_size > cfun->va_list_gpr_size)
528
                        cfun->va_list_gpr_size = gpr_size;
529
                      continue;
530
                    }
531
 
532
                  /* va_arg sequences may contain
533
                     other_ap_temp = ap_temp;
534
                     other_ap_temp = ap_temp + constant;
535
                     other_ap_temp = (some_type *) ap_temp;
536
                     ap = ap_temp;
537
                     statements.  */
538
                  if (rhs == use
539
                      && ((rhs_code == POINTER_PLUS_EXPR
540
                           && (TREE_CODE (gimple_assign_rhs2 (stmt))
541
                               == INTEGER_CST))
542
                          || gimple_assign_cast_p (stmt)
543
                          || (get_gimple_rhs_class (rhs_code)
544
                              == GIMPLE_SINGLE_RHS)))
545
                    {
546
                      tree lhs = gimple_assign_lhs (stmt);
547
 
548
                      if (TREE_CODE (lhs) == SSA_NAME
549
                          && bitmap_bit_p (si->va_list_escape_vars,
550
                                           DECL_UID (SSA_NAME_VAR (lhs))))
551
                        continue;
552
 
553
                      if (TREE_CODE (lhs) == VAR_DECL
554
                          && bitmap_bit_p (si->va_list_vars,
555
                                           DECL_UID (lhs)))
556
                        continue;
557
                    }
558
                }
559
 
560
              if (dump_file && (dump_flags & TDF_DETAILS))
561
                {
562
                  fputs ("va_list escapes in ", dump_file);
563
                  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
564
                  fputc ('\n', dump_file);
565
                }
566
              return true;
567
            }
568
        }
569
    }
570
 
571
  return false;
572
}
573
 
574
 
575
/* Return true if this optimization pass should be done.
   It only makes sense for stdarg functions.  */

static bool
gate_optimize_stdarg (void)
{
  /* This optimization is only for stdarg functions.  */
  return cfun->stdarg != 0;
}


/* Entry point to the stdarg optimization pass.  */
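
/* In outline: a first walk over the function body finds every va_start
   call and records the va_list variable it initializes, giving up if that
   variable is not a function-local variable of the ABI va_list type; a
   second walk then classifies each statement that mentions those
   variables, using the helpers above to accumulate
   cfun->va_list_gpr_size and cfun->va_list_fpr_size.  Anything
   unrecognized makes both sizes fall back to saving everything.  */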

static unsigned int
execute_optimize_stdarg (void)
{
  basic_block bb;
  bool va_list_escapes = false;
  bool va_list_simple_ptr;
  struct stdarg_info si;
  struct walk_stmt_info wi;
  const char *funcname = NULL;
  tree cfun_va_list;

  cfun->va_list_gpr_size = 0;
  cfun->va_list_fpr_size = 0;
  memset (&si, 0, sizeof (si));
  si.va_list_vars = BITMAP_ALLOC (NULL);
  si.va_list_escape_vars = BITMAP_ALLOC (NULL);

  if (dump_file)
    funcname = lang_hooks.decl_printable_name (current_function_decl, 2);

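  /* Determine what shape the ABI's va_list type has: either a plain
     void * / char * pointer acting as a single running counter, or, as on
     some 64-bit targets, a structure with separate counter fields for
     general purpose and floating point registers, which the backend
     exposes through va_list_gpr_counter_field and
     va_list_fpr_counter_field.  */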
  cfun_va_list = targetm.fn_abi_va_list (cfun->decl);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
                       && (TREE_TYPE (cfun_va_list) == void_type_node
                           || TREE_TYPE (cfun_va_list) == char_type_node);
  gcc_assert (is_gimple_reg_type (cfun_va_list) == va_list_simple_ptr);

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          gimple stmt = gsi_stmt (i);
          tree callee, ap;

          if (!is_gimple_call (stmt))
            continue;

          callee = gimple_call_fndecl (stmt);
          if (!callee
              || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
            continue;

          switch (DECL_FUNCTION_CODE (callee))
            {
            case BUILT_IN_VA_START:
              break;
              /* If old style builtins are used, don't optimize anything.  */
            case BUILT_IN_SAVEREGS:
            case BUILT_IN_ARGS_INFO:
            case BUILT_IN_NEXT_ARG:
              va_list_escapes = true;
              continue;
            default:
              continue;
            }

          si.va_start_count++;
          ap = gimple_call_arg (stmt, 0);

          if (TREE_CODE (ap) != ADDR_EXPR)
            {
              va_list_escapes = true;
              break;
            }
          ap = TREE_OPERAND (ap, 0);
          if (TREE_CODE (ap) == ARRAY_REF)
            {
              if (! integer_zerop (TREE_OPERAND (ap, 1)))
                {
                  va_list_escapes = true;
                  break;
                }
              ap = TREE_OPERAND (ap, 0);
            }
          if (TYPE_MAIN_VARIANT (TREE_TYPE (ap))
              != TYPE_MAIN_VARIANT (targetm.fn_abi_va_list (cfun->decl))
              || TREE_CODE (ap) != VAR_DECL)
            {
              va_list_escapes = true;
              break;
            }

          if (is_global_var (ap))
            {
              va_list_escapes = true;
              break;
            }

          bitmap_set_bit (si.va_list_vars, DECL_UID (ap));

          /* VA_START_BB and VA_START_AP will be only used if there is just
             one va_start in the function.  */
          si.va_start_bb = bb;
          si.va_start_ap = ap;
        }

      if (va_list_escapes)
        break;
    }

  /* If there were no va_start uses in the function, there is no need to
     save anything.  */
  if (si.va_start_count == 0)
    goto finish;

  /* If some va_list arguments weren't local, we can't optimize.  */
  if (va_list_escapes)
    goto finish;

  /* For void * or char * va_list, something useful can be done only
     if there is just one va_start.  */
  if (va_list_simple_ptr && si.va_start_count > 1)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For struct * va_list, if the backend didn't tell us what the counter fields
     are, there is nothing more we can do.  */
  if (!va_list_simple_ptr
      && va_list_gpr_counter_field == NULL_TREE
      && va_list_fpr_counter_field == NULL_TREE)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For void * or char * va_list there is just one counter
     (va_list itself).  Use VA_LIST_GPR_SIZE for it.  */
  if (va_list_simple_ptr)
    cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;

  calculate_dominance_info (CDI_DOMINATORS);
  memset (&wi, 0, sizeof (wi));
  wi.info = si.va_list_vars;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      si.compute_sizes = -1;
      si.bb = bb;

      /* For va_list_simple_ptr, we have to check PHI nodes too.  We treat
         them as assignments for the purpose of escape analysis.  This is
         not needed for non-simple va_list because virtual phis don't perform
         any real data movement.  */
      if (va_list_simple_ptr)
        {
          tree lhs, rhs;
          use_operand_p uop;
          ssa_op_iter soi;

          for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
            {
              gimple phi = gsi_stmt (i);
              lhs = PHI_RESULT (phi);

              if (!is_gimple_reg (lhs))
                continue;

              FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
                {
                  rhs = USE_FROM_PTR (uop);
                  if (va_list_ptr_read (&si, rhs, lhs))
                    continue;
                  else if (va_list_ptr_write (&si, lhs, rhs))
                    continue;
                  else
                    check_va_list_escapes (&si, lhs, rhs);

                  if (si.va_list_escapes)
                    {
                      if (dump_file && (dump_flags & TDF_DETAILS))
                        {
                          fputs ("va_list escapes in ", dump_file);
                          print_gimple_stmt (dump_file, phi, 0, dump_flags);
                          fputc ('\n', dump_file);
                        }
                      va_list_escapes = true;
                    }
                }
            }
        }

      for (i = gsi_start_bb (bb);
           !gsi_end_p (i) && !va_list_escapes;
           gsi_next (&i))
        {
          gimple stmt = gsi_stmt (i);

          /* Don't look at __builtin_va_{start,end}, they are ok.  */
          if (is_gimple_call (stmt))
            {
              tree callee = gimple_call_fndecl (stmt);

              if (callee
                  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
                  && (DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_START
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END))
                continue;
            }

          if (is_gimple_assign (stmt))
            {
              tree lhs = gimple_assign_lhs (stmt);
              tree rhs = gimple_assign_rhs1 (stmt);

              if (va_list_simple_ptr)
                {
                  if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
                      == GIMPLE_SINGLE_RHS)
                    {
                      /* Check for tem = ap.  */
                      if (va_list_ptr_read (&si, rhs, lhs))
                        continue;

                      /* Check for the last insn in:
                         tem1 = ap;
                         tem2 = tem1 + CST;
                         ap = tem2;
                         sequence.  */
                      else if (va_list_ptr_write (&si, lhs, rhs))
                        continue;
                    }

                  if ((gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
                       && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST)
                      || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
                      || (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
                          == GIMPLE_SINGLE_RHS))
                    check_va_list_escapes (&si, lhs, rhs);
                }
              else
                {
                  if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
                      == GIMPLE_SINGLE_RHS)
                    {
                      /* Check for ap[0].field = temp.  */
                      if (va_list_counter_struct_op (&si, lhs, rhs, true))
                        continue;

                      /* Check for temp = ap[0].field.  */
                      else if (va_list_counter_struct_op (&si, rhs, lhs,
                                                          false))
                        continue;
                    }

                  /* Do any architecture specific checking.  */
                  if (targetm.stdarg_optimize_hook
                      && targetm.stdarg_optimize_hook (&si, stmt))
                    continue;
                }
            }
          else if (is_gimple_debug (stmt))
            continue;

          /* All other uses of va_list are either va_copy (that is not handled
             in this optimization), taking address of va_list variable or
             passing va_list to other functions (in that case va_list might
             escape the function and therefore va_start needs to set it up
             fully), or some unexpected use of va_list.  None of these should
             happen in a gimplified VA_ARG_EXPR.  */
          if (si.va_list_escapes
              || walk_gimple_op (stmt, find_va_list_reference, &wi))
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fputs ("va_list escapes in ", dump_file);
                  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
                  fputc ('\n', dump_file);
                }
              va_list_escapes = true;
            }
        }

      if (va_list_escapes)
        break;
    }

  if (! va_list_escapes
      && va_list_simple_ptr
      && ! bitmap_empty_p (si.va_list_escape_vars)
      && check_all_va_list_escapes (&si))
    va_list_escapes = true;

finish:
  if (va_list_escapes)
    {
      cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
  BITMAP_FREE (si.va_list_vars);
  BITMAP_FREE (si.va_list_escape_vars);
  free (si.offsets);
  if (dump_file)
    {
      fprintf (dump_file, "%s: va_list escapes %d, needs to save ",
               funcname, (int) va_list_escapes);
      if (cfun->va_list_gpr_size >= VA_LIST_MAX_GPR_SIZE)
        fputs ("all", dump_file);
      else
        fprintf (dump_file, "%d", cfun->va_list_gpr_size);
      fputs (" GPR units and ", dump_file);
      if (cfun->va_list_fpr_size >= VA_LIST_MAX_FPR_SIZE)
        fputs ("all", dump_file);
      else
        fprintf (dump_file, "%d", cfun->va_list_fpr_size);
      fputs (" FPR units.\n", dump_file);
    }
  return 0;
}


struct gimple_opt_pass pass_stdarg =
{
 {
  GIMPLE_PASS,
  "stdarg",                             /* name */
  gate_optimize_stdarg,                 /* gate */
  execute_optimize_stdarg,              /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func                        /* todo_flags_finish */
 }
};
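
/* The pass descriptor above is registered in GCC's pass list (see
   passes.c); the gate ensures it only runs for functions that actually
   use variable arguments.  */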
