openrisc/trunk/gnu-src/gcc-4.2.2/gcc/tree-stdarg.c (revision 38)
Repository: https://opencores.org/ocsvn/openrisc_me/openrisc_me/trunk (Subversion repository openrisc_me)

/* Pass computing data for optimizing stdarg functions.
   Copyright (C) 2004, 2005, 2007 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "function.h"
#include "langhooks.h"
#include "diagnostic.h"
#include "target.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-stdarg.h"

/* A simple pass that attempts to optimize stdarg functions on architectures
   that need to save register arguments to stack on entry to stdarg functions.
   If the function doesn't use any va_start macros, no registers need to
   be saved.  If va_start macros are used and the va_list variables don't
   escape the function, it is only necessary to save registers that will be
   used in va_arg macros.  E.g. if va_arg is only used with integral types
   in the function, floating point registers don't need to be saved, etc.  */
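
/* For instance, in a varargs function of the following shape va_arg is only
   ever applied to an integral type, so on a target whose va_list tracks
   separate GPR and FPR save areas (e.g. x86-64) va_start only needs to
   spill the general purpose argument registers:

     int
     sum_ints (int count, ...)
     {
       va_list ap;
       int i, total = 0;

       va_start (ap, count);
       for (i = 0; i < count; i++)
         total += va_arg (ap, int);
       va_end (ap);
       return total;
     }

   (sum_ints is purely illustrative.)  Because va_arg appears inside a loop
   here, the counting logic below can only conclude that the whole GPR save
   area is needed; exact byte counts are only computed for va_arg uses that
   execute at most once per va_start.  */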


/* Return true if basic block VA_ARG_BB is dominated by VA_START_BB and
   is executed at most as many times as VA_START_BB.  */

static bool
reachable_at_most_once (basic_block va_arg_bb, basic_block va_start_bb)
{
  edge *stack, e;
  edge_iterator ei;
  int sp;
  sbitmap visited;
  bool ret;

  if (va_arg_bb == va_start_bb)
    return true;

  if (! dominated_by_p (CDI_DOMINATORS, va_arg_bb, va_start_bb))
    return false;

  stack = XNEWVEC (edge, n_basic_blocks + 1);
  sp = 0;

  visited = sbitmap_alloc (last_basic_block);
  sbitmap_zero (visited);
  ret = true;

  FOR_EACH_EDGE (e, ei, va_arg_bb->preds)
    stack[sp++] = e;

  while (sp)
    {
      basic_block src;

      --sp;
      e = stack[sp];
      src = e->src;

      if (e->flags & EDGE_COMPLEX)
        {
          ret = false;
          break;
        }

      if (src == va_start_bb)
        continue;

      /* va_arg_bb can be executed more times than va_start_bb.  */
      if (src == va_arg_bb)
        {
          ret = false;
          break;
        }

      gcc_assert (src != ENTRY_BLOCK_PTR);

      if (! TEST_BIT (visited, src->index))
        {
          SET_BIT (visited, src->index);
          FOR_EACH_EDGE (e, ei, src->preds)
            stack[sp++] = e;
        }
    }

  free (stack);
  sbitmap_free (visited);
  return ret;
}

/* For statement COUNTER = RHS, if RHS is COUNTER + constant,
   return constant, otherwise return (unsigned HOST_WIDE_INT) -1.
   GPR_P is true if this is GPR counter.  */

static unsigned HOST_WIDE_INT
va_list_counter_bump (struct stdarg_info *si, tree counter, tree rhs,
                      bool gpr_p)
{
  tree stmt, lhs, orig_lhs;
  unsigned HOST_WIDE_INT ret = 0, val, counter_val;
  unsigned int max_size;

  if (si->offsets == NULL)
    {
      unsigned int i;

      si->offsets = XNEWVEC (int, num_ssa_names);
      for (i = 0; i < num_ssa_names; ++i)
        si->offsets[i] = -1;
    }

  counter_val = gpr_p ? cfun->va_list_gpr_size : cfun->va_list_fpr_size;
  max_size = gpr_p ? VA_LIST_MAX_GPR_SIZE : VA_LIST_MAX_FPR_SIZE;
  orig_lhs = lhs = rhs;
  while (lhs)
    {
      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
        {
          if (counter_val >= max_size)
            {
              ret = max_size;
              break;
            }

          ret -= counter_val - si->offsets[SSA_NAME_VERSION (lhs)];
          break;
        }

      stmt = SSA_NAME_DEF_STMT (lhs);

      if (TREE_CODE (stmt) != MODIFY_EXPR
          || TREE_OPERAND (stmt, 0) != lhs)
        return (unsigned HOST_WIDE_INT) -1;

      rhs = TREE_OPERAND (stmt, 1);
      if (TREE_CODE (rhs) == WITH_SIZE_EXPR)
        rhs = TREE_OPERAND (rhs, 0);

      if (TREE_CODE (rhs) == SSA_NAME)
        {
          lhs = rhs;
          continue;
        }

      if ((TREE_CODE (rhs) == NOP_EXPR
           || TREE_CODE (rhs) == CONVERT_EXPR)
          && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
        {
          lhs = TREE_OPERAND (rhs, 0);
          continue;
        }

      if (TREE_CODE (rhs) == PLUS_EXPR
          && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME
          && TREE_CODE (TREE_OPERAND (rhs, 1)) == INTEGER_CST
          && host_integerp (TREE_OPERAND (rhs, 1), 1))
        {
          ret += tree_low_cst (TREE_OPERAND (rhs, 1), 1);
          lhs = TREE_OPERAND (rhs, 0);
          continue;
        }

      if (TREE_CODE (counter) != TREE_CODE (rhs))
        return (unsigned HOST_WIDE_INT) -1;

      if (TREE_CODE (counter) == COMPONENT_REF)
        {
          if (get_base_address (counter) != get_base_address (rhs)
              || TREE_CODE (TREE_OPERAND (rhs, 1)) != FIELD_DECL
              || TREE_OPERAND (counter, 1) != TREE_OPERAND (rhs, 1))
            return (unsigned HOST_WIDE_INT) -1;
        }
      else if (counter != rhs)
        return (unsigned HOST_WIDE_INT) -1;

      lhs = NULL;
    }

  lhs = orig_lhs;
  val = ret + counter_val;
  while (lhs)
    {
      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
        break;

      if (val >= max_size)
        si->offsets[SSA_NAME_VERSION (lhs)] = max_size;
      else
        si->offsets[SSA_NAME_VERSION (lhs)] = val;

      stmt = SSA_NAME_DEF_STMT (lhs);

      rhs = TREE_OPERAND (stmt, 1);
      if (TREE_CODE (rhs) == WITH_SIZE_EXPR)
        rhs = TREE_OPERAND (rhs, 0);

      if (TREE_CODE (rhs) == SSA_NAME)
        {
          lhs = rhs;
          continue;
        }

      if ((TREE_CODE (rhs) == NOP_EXPR
           || TREE_CODE (rhs) == CONVERT_EXPR)
          && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
        {
          lhs = TREE_OPERAND (rhs, 0);
          continue;
        }

      if (TREE_CODE (rhs) == PLUS_EXPR
          && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME
          && TREE_CODE (TREE_OPERAND (rhs, 1)) == INTEGER_CST
          && host_integerp (TREE_OPERAND (rhs, 1), 1))
        {
          val -= tree_low_cst (TREE_OPERAND (rhs, 1), 1);
          lhs = TREE_OPERAND (rhs, 0);
          continue;
        }

      lhs = NULL;
    }

  return ret;
}
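
/* As a rough illustration of the chain the walk above matches (the D.*
   temporaries are made up for the example, and gp_offset stands for
   whatever field the backend declared as the GPR counter), a single
   va_arg (ap, int) typically leaves statements like

     D.1 = ap.gp_offset;
     D.2 = D.1 + 8;
     ap.gp_offset = D.2;

   and va_list_counter_bump (si, ap.gp_offset, D.2, true) walks the SSA
   definitions of D.2 back to the read of ap.gp_offset, returning the
   accumulated constant 8.  */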


/* Called by walk_tree to look for references to va_list variables.  */

static tree
find_va_list_reference (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
                        void *data)
{
  bitmap va_list_vars = (bitmap) data;
  tree var = *tp;

  if (TREE_CODE (var) == SSA_NAME)
    var = SSA_NAME_VAR (var);

  if (TREE_CODE (var) == VAR_DECL
      && bitmap_bit_p (va_list_vars, DECL_UID (var)))
    return var;

  return NULL_TREE;
}

/* Helper function of va_list_counter_struct_op.  Compute
   cfun->va_list_{g,f}pr_size.  AP is a va_list GPR/FPR counter,
   if WRITE_P is true, seen in AP = VAR, otherwise seen in VAR = AP
   statement.  GPR_P is true if AP is a GPR counter, false if it is
   a FPR counter.  */

static void
va_list_counter_op (struct stdarg_info *si, tree ap, tree var, bool gpr_p,
                    bool write_p)
{
  unsigned HOST_WIDE_INT increment;

  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
          && reachable_at_most_once (si->bb, si->va_start_bb))
        si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "bb%d will %sbe executed at most once for each va_start "
                 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
                 si->va_start_bb->index);
    }

  if (write_p
      && si->compute_sizes
      && (increment = va_list_counter_bump (si, ap, var, gpr_p)) + 1 > 1)
    {
      if (gpr_p && cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
        {
          cfun->va_list_gpr_size += increment;
          return;
        }

      if (!gpr_p && cfun->va_list_fpr_size + increment < VA_LIST_MAX_FPR_SIZE)
        {
          cfun->va_list_fpr_size += increment;
          return;
        }
    }

  if (write_p || !si->compute_sizes)
    {
      if (gpr_p)
        cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      else
        cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
}
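
/* For concreteness (the details are target dependent): on x86-64 the
   standard va_list is a one-element array of

     struct {
       unsigned int gp_offset;
       unsigned int fp_offset;
       void *overflow_arg_area;
       void *reg_save_area;
     };

   and the backend is expected to point va_list_gpr_counter_field and
   va_list_fpr_counter_field at the gp_offset and fp_offset FIELD_DECLs,
   which is what lets the COMPONENT_REF checks below recognize reads and
   writes of the two counters.  */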


/* If AP is a va_list GPR/FPR counter, compute cfun->va_list_{g,f}pr_size.
   If WRITE_P is true, AP has been seen in AP = VAR assignment, if WRITE_P
   is false, AP has been seen in VAR = AP assignment.
   Return true if the AP = VAR (resp. VAR = AP) statement is a recognized
   va_arg operation that doesn't cause the va_list variable to escape
   current function.  */

static bool
va_list_counter_struct_op (struct stdarg_info *si, tree ap, tree var,
                           bool write_p)
{
  tree base;

  if (TREE_CODE (ap) != COMPONENT_REF
      || TREE_CODE (TREE_OPERAND (ap, 1)) != FIELD_DECL)
    return false;

  if (TREE_CODE (var) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, DECL_UID (SSA_NAME_VAR (var))))
    return false;

  base = get_base_address (ap);
  if (TREE_CODE (base) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (base)))
    return false;

  if (TREE_OPERAND (ap, 1) == va_list_gpr_counter_field)
    va_list_counter_op (si, ap, var, true, write_p);
  else if (TREE_OPERAND (ap, 1) == va_list_fpr_counter_field)
    va_list_counter_op (si, ap, var, false, write_p);

  return true;
}

/* Check for TEM = AP.  Return true if found and the caller shouldn't
   search for va_list references in the statement.  */

static bool
va_list_ptr_read (struct stdarg_info *si, tree ap, tree tem)
{
  if (TREE_CODE (ap) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap)))
    return false;

  if (TREE_CODE (tem) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars,
                       DECL_UID (SSA_NAME_VAR (tem)))
      || is_global_var (SSA_NAME_VAR (tem)))
    return false;

  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
          && reachable_at_most_once (si->bb, si->va_start_bb))
        si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "bb%d will %sbe executed at most once for each va_start "
                 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
                 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    return false;

  if (va_list_counter_bump (si, ap, tem, true) == (unsigned HOST_WIDE_INT) -1)
    return false;

  /* Note the temporary, as we need to track whether it doesn't escape
     the current function.  */
  bitmap_set_bit (si->va_list_escape_vars,
                  DECL_UID (SSA_NAME_VAR (tem)));
  return true;
}

/* Check for:
     tem1 = AP;
     TEM2 = tem1 + CST;
     AP = TEM2;
   sequence and update cfun->va_list_gpr_size.  Return true if found.  */

static bool
va_list_ptr_write (struct stdarg_info *si, tree ap, tree tem2)
{
  unsigned HOST_WIDE_INT increment;

  if (TREE_CODE (ap) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap)))
    return false;

  if (TREE_CODE (tem2) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, DECL_UID (SSA_NAME_VAR (tem2))))
    return false;

  if (si->compute_sizes <= 0)
    return false;

  increment = va_list_counter_bump (si, ap, tem2, true);
  if (increment + 1 <= 1)
    return false;

  if (cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
    cfun->va_list_gpr_size += increment;
  else
    cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;

  return true;
}

/* If RHS is X, (some type *) X or X + CST for X a temporary variable
   containing value of some va_list variable plus optionally some constant,
   either set si->va_list_escapes or add LHS to si->va_list_escape_vars,
   depending whether LHS is a function local temporary.  */

static void
check_va_list_escapes (struct stdarg_info *si, tree lhs, tree rhs)
{
  if (! POINTER_TYPE_P (TREE_TYPE (rhs)))
    return;

  if ((TREE_CODE (rhs) == PLUS_EXPR
       && TREE_CODE (TREE_OPERAND (rhs, 1)) == INTEGER_CST)
      || TREE_CODE (rhs) == NOP_EXPR
      || TREE_CODE (rhs) == CONVERT_EXPR)
    rhs = TREE_OPERAND (rhs, 0);

  if (TREE_CODE (rhs) != SSA_NAME
      || ! bitmap_bit_p (si->va_list_escape_vars,
                         DECL_UID (SSA_NAME_VAR (rhs))))
    return;

  if (TREE_CODE (lhs) != SSA_NAME || is_global_var (SSA_NAME_VAR (lhs)))
    {
      si->va_list_escapes = true;
      return;
    }

  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
          && reachable_at_most_once (si->bb, si->va_start_bb))
        si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "bb%d will %sbe executed at most once for each va_start "
                 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
                 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    {
      si->va_list_escapes = true;
      return;
    }

  if (va_list_counter_bump (si, si->va_start_ap, lhs, true)
      == (unsigned HOST_WIDE_INT) -1)
    {
      si->va_list_escapes = true;
      return;
    }

  bitmap_set_bit (si->va_list_escape_vars,
                  DECL_UID (SSA_NAME_VAR (lhs)));
}
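
/* For illustration, the classic way a va_list escapes is being handed to
   another function, as in

     void
     log_message (const char *fmt, ...)
     {
       va_list ap;

       va_start (ap, fmt);
       vfprintf (stderr, fmt, ap);
       va_end (ap);
     }

   where ap is passed on to vfprintf (log_message is purely illustrative).
   Derived pointer temporaries are tracked by check_va_list_escapes above;
   direct references such as the one in this sketch are caught by the
   find_va_list_reference walk in execute_optimize_stdarg below.  Either
   way, once a va_list escapes, va_start has to save all argument
   registers, so cfun->va_list_gpr_size and cfun->va_list_fpr_size are
   forced to their maximum values.  */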


/* Check all uses of temporaries from si->va_list_escape_vars bitmap.
   Return true if va_list might be escaping.  */

static bool
check_all_va_list_escapes (struct stdarg_info *si)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;

      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
        {
          tree stmt = bsi_stmt (i), use;
          ssa_op_iter iter;

          FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_ALL_USES)
            {
              if (! bitmap_bit_p (si->va_list_escape_vars,
                                  DECL_UID (SSA_NAME_VAR (use))))
                continue;

              if (TREE_CODE (stmt) == MODIFY_EXPR)
                {
                  tree lhs = TREE_OPERAND (stmt, 0);
                  tree rhs = TREE_OPERAND (stmt, 1);

                  if (TREE_CODE (rhs) == WITH_SIZE_EXPR)
                    rhs = TREE_OPERAND (rhs, 0);

                  /* x = *ap_temp;  */
                  if (TREE_CODE (rhs) == INDIRECT_REF
                      && TREE_OPERAND (rhs, 0) == use
                      && TYPE_SIZE_UNIT (TREE_TYPE (rhs))
                      && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (rhs)), 1)
                      && si->offsets[SSA_NAME_VERSION (use)] != -1)
                    {
                      unsigned HOST_WIDE_INT gpr_size;
                      tree access_size = TYPE_SIZE_UNIT (TREE_TYPE (rhs));

                      gpr_size = si->offsets[SSA_NAME_VERSION (use)]
                                 + tree_low_cst (access_size, 1);
                      if (gpr_size >= VA_LIST_MAX_GPR_SIZE)
                        cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
                      else if (gpr_size > cfun->va_list_gpr_size)
                        cfun->va_list_gpr_size = gpr_size;
                      continue;
                    }

                  /* va_arg sequences may contain
                     other_ap_temp = ap_temp;
                     other_ap_temp = ap_temp + constant;
                     other_ap_temp = (some_type *) ap_temp;
                     ap = ap_temp;
                     statements.  */
                  if ((TREE_CODE (rhs) == PLUS_EXPR
                       && TREE_CODE (TREE_OPERAND (rhs, 1)) == INTEGER_CST)
                      || TREE_CODE (rhs) == NOP_EXPR
                      || TREE_CODE (rhs) == CONVERT_EXPR)
                    rhs = TREE_OPERAND (rhs, 0);

                  if (rhs == use)
                    {
                      if (TREE_CODE (lhs) == SSA_NAME
                          && bitmap_bit_p (si->va_list_escape_vars,
                                           DECL_UID (SSA_NAME_VAR (lhs))))
                        continue;

                      if (TREE_CODE (lhs) == VAR_DECL
                          && bitmap_bit_p (si->va_list_vars,
                                           DECL_UID (lhs)))
                        continue;
                    }
                }

              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fputs ("va_list escapes in ", dump_file);
                  print_generic_expr (dump_file, stmt, dump_flags);
                  fputc ('\n', dump_file);
                }
              return true;
            }
        }
    }

  return false;
}

/* Return true if this optimization pass should be done.
   It only makes sense for stdarg functions.  */

static bool
gate_optimize_stdarg (void)
{
  /* This optimization is only for stdarg functions.  */
  return current_function_stdarg != 0;
}

/* Entry point to the stdarg optimization pass.  */

static unsigned int
execute_optimize_stdarg (void)
{
  basic_block bb;
  bool va_list_escapes = false;
  bool va_list_simple_ptr;
  struct stdarg_info si;
  const char *funcname = NULL;

  cfun->va_list_gpr_size = 0;
  cfun->va_list_fpr_size = 0;
  memset (&si, 0, sizeof (si));
  si.va_list_vars = BITMAP_ALLOC (NULL);
  si.va_list_escape_vars = BITMAP_ALLOC (NULL);

  if (dump_file)
    funcname = lang_hooks.decl_printable_name (current_function_decl, 2);

  va_list_simple_ptr = POINTER_TYPE_P (va_list_type_node)
                       && (TREE_TYPE (va_list_type_node) == void_type_node
                           || TREE_TYPE (va_list_type_node) == char_type_node);
  gcc_assert (is_gimple_reg_type (va_list_type_node) == va_list_simple_ptr);

  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;

      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
        {
          tree stmt = bsi_stmt (i);
          tree call = get_call_expr_in (stmt), callee;
          tree ap;

          if (!call)
            continue;

          callee = get_callee_fndecl (call);
          if (!callee
              || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
            continue;

          switch (DECL_FUNCTION_CODE (callee))
            {
            case BUILT_IN_VA_START:
              break;
              /* If old style builtins are used, don't optimize anything.  */
            case BUILT_IN_SAVEREGS:
            case BUILT_IN_STDARG_START:
            case BUILT_IN_ARGS_INFO:
            case BUILT_IN_NEXT_ARG:
              va_list_escapes = true;
              continue;
            default:
              continue;
            }

          si.va_start_count++;
          ap = TREE_VALUE (TREE_OPERAND (call, 1));

          if (TREE_CODE (ap) != ADDR_EXPR)
            {
              va_list_escapes = true;
              break;
            }
          ap = TREE_OPERAND (ap, 0);
          if (TREE_CODE (ap) == ARRAY_REF)
            {
              if (! integer_zerop (TREE_OPERAND (ap, 1)))
                {
                  va_list_escapes = true;
                  break;
                }
              ap = TREE_OPERAND (ap, 0);
            }
          if (TYPE_MAIN_VARIANT (TREE_TYPE (ap))
              != TYPE_MAIN_VARIANT (va_list_type_node)
              || TREE_CODE (ap) != VAR_DECL)
            {
              va_list_escapes = true;
              break;
            }

          if (is_global_var (ap))
            {
              va_list_escapes = true;
              break;
            }

          bitmap_set_bit (si.va_list_vars, DECL_UID (ap));

          /* VA_START_BB and VA_START_AP will be only used if there is just
             one va_start in the function.  */
          si.va_start_bb = bb;
          si.va_start_ap = ap;
        }

      if (va_list_escapes)
        break;
    }

  /* If there were no va_start uses in the function, there is no need to
     save anything.  */
  if (si.va_start_count == 0)
    goto finish;

  /* If some va_list arguments weren't local, we can't optimize.  */
  if (va_list_escapes)
    goto finish;

  /* For void * or char * va_list, something useful can be done only
     if there is just one va_start.  */
  if (va_list_simple_ptr && si.va_start_count > 1)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For struct * va_list, if the backend didn't tell us what the counter fields
     are, there is nothing more we can do.  */
  if (!va_list_simple_ptr
      && va_list_gpr_counter_field == NULL_TREE
      && va_list_fpr_counter_field == NULL_TREE)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For void * or char * va_list there is just one counter
     (va_list itself).  Use VA_LIST_GPR_SIZE for it.  */
  if (va_list_simple_ptr)
    cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;

  calculate_dominance_info (CDI_DOMINATORS);

  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;

      si.compute_sizes = -1;
      si.bb = bb;

      /* For va_list_simple_ptr, we have to check PHI nodes too.  We treat
         them as assignments for the purpose of escape analysis.  This is
         not needed for non-simple va_list because virtual phis don't perform
         any real data movement.  */
      if (va_list_simple_ptr)
        {
          tree phi, lhs, rhs;
          use_operand_p uop;
          ssa_op_iter soi;

          for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
            {
              lhs = PHI_RESULT (phi);

              if (!is_gimple_reg (lhs))
                continue;

              FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
                {
                  rhs = USE_FROM_PTR (uop);
                  if (va_list_ptr_read (&si, rhs, lhs))
                    continue;
                  else if (va_list_ptr_write (&si, lhs, rhs))
                    continue;
                  else
                    check_va_list_escapes (&si, lhs, rhs);

                  if (si.va_list_escapes
                      || walk_tree (&phi, find_va_list_reference,
                                    si.va_list_vars, NULL))
                    {
                      if (dump_file && (dump_flags & TDF_DETAILS))
                        {
                          fputs ("va_list escapes in ", dump_file);
                          print_generic_expr (dump_file, phi, dump_flags);
                          fputc ('\n', dump_file);
                        }
                      va_list_escapes = true;
                    }
                }
            }
        }

      for (i = bsi_start (bb);
           !bsi_end_p (i) && !va_list_escapes;
           bsi_next (&i))
        {
          tree stmt = bsi_stmt (i);
          tree call;

          /* Don't look at __builtin_va_{start,end}, they are ok.  */
          call = get_call_expr_in (stmt);
          if (call)
            {
              tree callee = get_callee_fndecl (call);

              if (callee
                  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
                  && (DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_START
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END))
                continue;
            }

          if (TREE_CODE (stmt) == MODIFY_EXPR)
            {
              tree lhs = TREE_OPERAND (stmt, 0);
              tree rhs = TREE_OPERAND (stmt, 1);

              if (TREE_CODE (rhs) == WITH_SIZE_EXPR)
                rhs = TREE_OPERAND (rhs, 0);

              if (va_list_simple_ptr)
                {
                  /* Check for tem = ap.  */
                  if (va_list_ptr_read (&si, rhs, lhs))
                    continue;

                  /* Check for the last insn in:
                     tem1 = ap;
                     tem2 = tem1 + CST;
                     ap = tem2;
                     sequence.  */
                  else if (va_list_ptr_write (&si, lhs, rhs))
                    continue;

                  else
                    check_va_list_escapes (&si, lhs, rhs);
                }
              else
                {
                  /* Check for ap[0].field = temp.  */
                  if (va_list_counter_struct_op (&si, lhs, rhs, true))
                    continue;

                  /* Check for temp = ap[0].field.  */
                  else if (va_list_counter_struct_op (&si, rhs, lhs, false))
                    continue;

                  /* Do any architecture specific checking.  */
                  else if (targetm.stdarg_optimize_hook
                           && targetm.stdarg_optimize_hook (&si, lhs, rhs))
                    continue;
                }
            }

          /* All other uses of va_list are either va_copy (that is not handled
             in this optimization), taking address of va_list variable or
             passing va_list to other functions (in that case va_list might
             escape the function and therefore va_start needs to set it up
             fully), or some unexpected use of va_list.  None of these should
             happen in a gimplified VA_ARG_EXPR.  */
          if (si.va_list_escapes
              || walk_tree (&stmt, find_va_list_reference,
                            si.va_list_vars, NULL))
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fputs ("va_list escapes in ", dump_file);
                  print_generic_expr (dump_file, stmt, dump_flags);
                  fputc ('\n', dump_file);
                }
              va_list_escapes = true;
            }
        }

      if (va_list_escapes)
        break;
    }

  if (! va_list_escapes
      && va_list_simple_ptr
      && ! bitmap_empty_p (si.va_list_escape_vars)
      && check_all_va_list_escapes (&si))
    va_list_escapes = true;

finish:
  if (va_list_escapes)
    {
      cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
  BITMAP_FREE (si.va_list_vars);
  BITMAP_FREE (si.va_list_escape_vars);
  free (si.offsets);
  if (dump_file)
    {
      fprintf (dump_file, "%s: va_list escapes %d, needs to save ",
               funcname, (int) va_list_escapes);
      if (cfun->va_list_gpr_size >= VA_LIST_MAX_GPR_SIZE)
        fputs ("all", dump_file);
      else
        fprintf (dump_file, "%d", cfun->va_list_gpr_size);
      fputs (" GPR units and ", dump_file);
      if (cfun->va_list_fpr_size >= VA_LIST_MAX_FPR_SIZE)
        fputs ("all", dump_file);
      else
        fprintf (dump_file, "%d", cfun->va_list_fpr_size);
      fputs (" FPR units.\n", dump_file);
    }
  return 0;
}

struct tree_opt_pass pass_stdarg =
{
  "stdarg",                             /* name */
  gate_optimize_stdarg,                 /* gate */
  execute_optimize_stdarg,              /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,     /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
  0                                     /* letter */
};
