/* Callgraph based analysis of static variables.
   Copyright (C) 2004, 2005, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file marks functions as being either const (TREE_READONLY) or
   pure (DECL_PURE_P).  It can also set a variant of these that
   are allowed to loop indefinitely (DECL_LOOPING_CONST_PURE_P).

   This must be run after inlining decisions have been made since
   otherwise, the local sets will not contain information that is
   consistent with the post-inlined state.  The global sets are not
   prone to this problem since they are by definition transitive.  */

/* The code in this module is called by the ipa pass manager.  It
   should be one of the later passes since its information is used by
   the rest of the compilation.  */
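
/* Illustrative sketch (not part of the original file): the properties this
   pass derives correspond to what a programmer can assert by hand with
   function attributes.  A function the pass proves const behaves like f
   below, a function it proves pure behaves like g.  */
#if 0
extern int f (int) __attribute__ ((const)); /* result depends only on args   */
extern int g (int) __attribute__ ((pure));  /* may read, but not write, mem  */
#endif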

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "langhooks.h"
#include "pointer-set.h"
#include "ggc.h"
#include "ipa-utils.h"
#include "gimple.h"
#include "cgraph.h"
#include "output.h"
#include "flags.h"
#include "timevar.h"
#include "diagnostic.h"
#include "langhooks.h"
#include "target.h"
#include "lto-streamer.h"
#include "cfgloop.h"
#include "tree-scalar-evolution.h"

static struct pointer_set_t *visited_nodes;

/* Lattice values for const and pure functions.  Everything starts out
   being const, then may drop to pure and then neither depending on
   what is found.  */
enum pure_const_state_e
{
  IPA_CONST,
  IPA_PURE,
  IPA_NEITHER
};
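
/* Illustrative sketch (not part of the original file): the analysis only
   ever moves a function down this lattice, and the propagation code below
   combines states by taking the maximum enum value
   (IPA_CONST < IPA_PURE < IPA_NEITHER).  A hypothetical helper would be:  */
#if 0
static inline enum pure_const_state_e
worse_state (enum pure_const_state_e a, enum pure_const_state_e b)
{
  return a < b ? b : a;
}
#endif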

/* Holder for the const_state.  There is one of these per function
   decl.  */
struct funct_state_d
{
  /* See above.  */
  enum pure_const_state_e pure_const_state;
  /* What the user set here; we can always be sure about this.  */
  enum pure_const_state_e state_previously_known;
  bool looping_previously_known;

  /* True if the function could possibly loop forever.  There are a
     lot of ways that this could be determined.  We are pretty
     conservative here.  While it is possible to cse pure and const
     calls, it is not legal to have dce get rid of the call if there
     is a possibility that the call could loop forever, since this is
     a behavioral change.  */
  bool looping;

  bool can_throw;
};
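
/* Illustrative sketch (not part of the original file): why the LOOPING flag
   matters.  Even when a callee is const, a call whose result is unused may
   only be deleted as dead code if the callee is known to terminate;
   removing a possibly non-terminating call would change observable
   behavior.  */
#if 0
extern int maybe_spin (int) __attribute__ ((const)); /* not known to halt */

void
caller (int x)
{
  maybe_spin (x);  /* result unused, yet the call must stay unless
                      maybe_spin is known not to loop forever */
}
#endif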

typedef struct funct_state_d * funct_state;

/* The storage of the funct_state is abstracted because there is the
   possibility that it may be desirable to move this to the cgraph
   local info.  */

/* Array, indexed by cgraph node uid, of function states.  */

DEF_VEC_P (funct_state);
DEF_VEC_ALLOC_P (funct_state, heap);
static VEC (funct_state, heap) *funct_state_vec;

/* Holders of ipa cgraph hooks: */
static struct cgraph_node_hook_list *function_insertion_hook_holder;
static struct cgraph_2node_hook_list *node_duplication_hook_holder;
static struct cgraph_node_hook_list *node_removal_hook_holder;

/* Free the function state vector.  */

static void
finish_state (void)
{
  free (funct_state_vec);
}


/* Return the function state from NODE.  */

static inline funct_state
get_function_state (struct cgraph_node *node)
{
  if (!funct_state_vec
      || VEC_length (funct_state, funct_state_vec) <= (unsigned int)node->uid)
    return NULL;
  return VEC_index (funct_state, funct_state_vec, node->uid);
}

/* Set the function state S for NODE.  */

static inline void
set_function_state (struct cgraph_node *node, funct_state s)
{
  if (!funct_state_vec
      || VEC_length (funct_state, funct_state_vec) <= (unsigned int)node->uid)
    VEC_safe_grow_cleared (funct_state, heap, funct_state_vec, node->uid + 1);
  VEC_replace (funct_state, funct_state_vec, node->uid, s);
}

/* Check to see if the use (or definition, when CHECKING_WRITE is true)
   of variable T is legal in a function that is either pure or const.  */

static inline void
check_decl (funct_state local,
            tree t, bool checking_write)
{
  /* Do not want to do anything with volatile except mark any
     function that uses one to be not const or pure.  */
  if (TREE_THIS_VOLATILE (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, "    Volatile operand is not const/pure");
      return;
    }

  /* Do not care about a local automatic that is not static.  */
  if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
    return;

  /* If the variable has the "used" attribute, treat it as if it had
     been touched by the devil.  */
  if (DECL_PRESERVE_P (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, "    Used static/global variable is not const/pure\n");
      return;
    }

  /* Since we have dealt with the locals and params cases above, if we
     are CHECKING_WRITE, this cannot be a pure or constant
     function.  */
  if (checking_write)
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, "    static/global memory write is not const/pure\n");
      return;
    }

  if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
    {
      /* Readonly reads are safe.  */
      if (TREE_READONLY (t) && !TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (t)))
        return; /* Read of a constant, do not change the function state.  */
      else
        {
          if (dump_file)
            fprintf (dump_file, "    global memory read is not const\n");
          /* Just a regular read.  */
          if (local->pure_const_state == IPA_CONST)
            local->pure_const_state = IPA_PURE;
        }
    }
  else
    {
      /* Compilation level statics can be read if they are readonly
         variables.  */
      if (TREE_READONLY (t))
        return;

      if (dump_file)
        fprintf (dump_file, "    static memory read is not const\n");
      /* Just a regular read.  */
      if (local->pure_const_state == IPA_CONST)
        local->pure_const_state = IPA_PURE;
    }
}
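
/* Illustrative sketch (not part of the original file): how check_decl above
   classifies references made by a candidate function.  */
#if 0
static int file_static;
extern int global;
const int table[4] = { 1, 2, 3, 4 };

int use_const (int i)  { return table[i & 3]; } /* readonly read: still const  */
int use_static (void)  { return file_static; }  /* static read: at best pure   */
void set_global (void) { global = 1; }          /* global write: neither       */
#endif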


/* Check to see if the use (or definition, when CHECKING_WRITE is true)
   of operand T is legal in a function that is either pure or const.  */

static inline void
check_op (funct_state local, tree t, bool checking_write)
{
  t = get_base_address (t);
  if (t && TREE_THIS_VOLATILE (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, "    Volatile indirect ref is not const/pure\n");
      return;
    }
  else if (t
           && INDIRECT_REF_P (t)
           && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
           && !ptr_deref_may_alias_global_p (TREE_OPERAND (t, 0)))
    {
      if (dump_file)
        fprintf (dump_file, "    Indirect ref to local memory is OK\n");
      return;
    }
  else if (checking_write)
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, "    Indirect ref write is not const/pure\n");
      return;
    }
  else
    {
      if (dump_file)
        fprintf (dump_file, "    Indirect ref read is not const\n");
      if (local->pure_const_state == IPA_CONST)
        local->pure_const_state = IPA_PURE;
    }
}

/* Check the parameters of a function call to CALL_EXPR to see if
   there are any references in the parameters that are not allowed for
   pure or const functions.  Also check to see if this is either an
   indirect call, a call outside the compilation unit, or has special
   attributes that may also affect the purity.  CALL is the call
   statement being analyzed and IPA is true when running the
   interprocedural analysis.  */

static void
check_call (funct_state local, gimple call, bool ipa)
{
  int flags = gimple_call_flags (call);
  tree callee_t = gimple_call_fndecl (call);
  bool possibly_throws = stmt_could_throw_p (call);
  bool possibly_throws_externally = (possibly_throws
                                     && stmt_can_throw_external (call));

  if (possibly_throws)
    {
      unsigned int i;
      for (i = 0; i < gimple_num_ops (call); i++)
        if (gimple_op (call, i)
            && tree_could_throw_p (gimple_op (call, i)))
          {
            if (possibly_throws && flag_non_call_exceptions)
              {
                if (dump_file)
                  fprintf (dump_file, "    operand can throw; looping\n");
                local->looping = true;
              }
            if (possibly_throws_externally)
              {
                if (dump_file)
                  fprintf (dump_file, "    operand can throw externally\n");
                local->can_throw = true;
              }
          }
    }

  /* The const and pure flags are set by a variety of places in the
     compiler (including here).  If someone has already set the flags
     for the callee, (such as for some of the builtins) we will use
     them, otherwise we will compute our own information.

     Const and pure functions have less clobber effects than other
     functions so we process these first.  Otherwise if it is a call
     outside the compilation unit or an indirect call we punt.  This
     leaves local calls which will be processed by following the call
     graph.  */
  if (callee_t)
    {
      /* When bad things happen to bad functions, they cannot be const
         or pure.  */
      if (setjmp_call_p (callee_t))
        {
          if (dump_file)
            fprintf (dump_file, "    setjmp is not const/pure\n");
          local->looping = true;
          local->pure_const_state = IPA_NEITHER;
        }

      if (DECL_BUILT_IN_CLASS (callee_t) == BUILT_IN_NORMAL)
        switch (DECL_FUNCTION_CODE (callee_t))
          {
          case BUILT_IN_LONGJMP:
          case BUILT_IN_NONLOCAL_GOTO:
            if (dump_file)
              fprintf (dump_file, "    longjmp and nonlocal goto is not const/pure\n");
            local->pure_const_state = IPA_NEITHER;
            local->looping = true;
            break;
          default:
            break;
          }
    }

  /* When not in IPA mode, we can still handle self recursion.  */
  if (!ipa && callee_t == current_function_decl)
    local->looping = true;
  /* Either the callee is unknown or we are doing local analysis.
     Look to see if there are any bits available for the callee (such as by
     declaration or because it is builtin) and process solely on the basis of
     those bits.  */
  else if (!ipa || !callee_t)
    {
      if (possibly_throws && flag_non_call_exceptions)
        {
          if (dump_file)
            fprintf (dump_file, "    can throw; looping\n");
          local->looping = true;
        }
      if (possibly_throws_externally)
        {
          if (dump_file)
            {
              fprintf (dump_file, "    can throw externally to lp %i\n",
                       lookup_stmt_eh_lp (call));
              if (callee_t)
                fprintf (dump_file, "     callee:%s\n",
                         IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t)));
            }
          local->can_throw = true;
        }
      if (flags & ECF_CONST)
        {
          if (callee_t && DECL_LOOPING_CONST_OR_PURE_P (callee_t))
            local->looping = true;
        }
      else if (flags & ECF_PURE)
        {
          if (callee_t && DECL_LOOPING_CONST_OR_PURE_P (callee_t))
            local->looping = true;
          if (dump_file)
            fprintf (dump_file, "    pure function call is not const\n");
          if (local->pure_const_state == IPA_CONST)
            local->pure_const_state = IPA_PURE;
        }
      else
        {
          if (dump_file)
            fprintf (dump_file, "    unknown function call is not const/pure\n");
          local->pure_const_state = IPA_NEITHER;
          local->looping = true;
        }
    }
  /* Direct function calls are handled by IPA propagation.  */
}
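
/* Illustrative sketch (not part of the original file): how calls seen by
   check_call above degrade the caller's state in the local analysis.  */
#if 0
extern int get_cached (void) __attribute__ ((pure));
extern void log_event (void);                  /* no attributes, unknown body */

int wrapper (int x) { return x + get_cached (); } /* at best pure, not const  */
int noisy (int x)   { log_event (); return x; }   /* unknown call: neither    */
#endif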

/* Wrapper around check_decl for loads.  */

static bool
check_load (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, false);
  else
    check_op ((funct_state)data, op, false);
  return false;
}

/* Wrapper around check_decl for stores.  */

static bool
check_store (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, true);
  else
    check_op ((funct_state)data, op, true);
  return false;
}

/* Look at the statement pointed to by GSIP and figure out what interesting
   side effects it has.  */
static void
check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
{
  gimple stmt = gsi_stmt (*gsip);
  unsigned int i = 0;

  if (is_gimple_debug (stmt))
    return;

  if (dump_file)
    {
      fprintf (dump_file, "  scanning: ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  /* Look for loads and stores.  */
  walk_stmt_load_store_ops (stmt, local, check_load, check_store);

  if (gimple_code (stmt) != GIMPLE_CALL
      && stmt_could_throw_p (stmt))
    {
      if (flag_non_call_exceptions)
        {
          if (dump_file)
            fprintf (dump_file, "    can throw; looping");
          local->looping = true;
        }
      if (stmt_can_throw_external (stmt))
        {
          if (dump_file)
            fprintf (dump_file, "    can throw externally");
          local->can_throw = true;
        }
    }
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      check_call (local, stmt, ipa);
      break;
    case GIMPLE_LABEL:
      if (DECL_NONLOCAL (gimple_label_label (stmt)))
        /* Target of a long jump.  */
        {
          if (dump_file)
            fprintf (dump_file, "    nonlocal label is not const/pure");
          local->pure_const_state = IPA_NEITHER;
        }
      break;
    case GIMPLE_ASM:
      for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
        {
          tree op = gimple_asm_clobber_op (stmt, i);
          if (simple_cst_equal (TREE_VALUE (op), memory_identifier_string) == 1)
            {
              if (dump_file)
                fprintf (dump_file, "    memory asm clobber is not const/pure");
              /* Abandon all hope, ye who enter here.  */
              local->pure_const_state = IPA_NEITHER;
            }
        }
      if (gimple_asm_volatile_p (stmt))
        {
          if (dump_file)
            fprintf (dump_file, "    volatile is not const/pure");
          /* Abandon all hope, ye who enter here.  */
          local->pure_const_state = IPA_NEITHER;
          local->looping = true;
        }
      return;
    default:
      break;
    }
}
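
/* Illustrative sketch (not part of the original file): inline asms that the
   GIMPLE_ASM handling above rejects.  */
#if 0
int
touch_hardware (void)
{
  int out = 0;
  __asm__ ("" : "=r" (out) : : "memory"); /* memory clobber -> not const/pure */
  __asm__ __volatile__ ("");              /* volatile asm -> neither, looping */
  return out;
}
#endif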


/* This is the main routine for finding the reference patterns for
   global variables within a function FN.  */

static funct_state
analyze_function (struct cgraph_node *fn, bool ipa)
{
  tree decl = fn->decl;
  tree old_decl = current_function_decl;
  funct_state l;
  basic_block this_block;

  l = XCNEW (struct funct_state_d);
  l->pure_const_state = IPA_CONST;
  l->state_previously_known = IPA_NEITHER;
  l->looping_previously_known = true;
  l->looping = false;
  l->can_throw = false;

  if (dump_file)
    {
      fprintf (dump_file, "\n\n local analysis of %s\n ",
               cgraph_node_name (fn));
    }

  push_cfun (DECL_STRUCT_FUNCTION (decl));
  current_function_decl = decl;

  FOR_EACH_BB (this_block)
    {
      gimple_stmt_iterator gsi;
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      for (gsi = gsi_start_bb (this_block);
           !gsi_end_p (gsi);
           gsi_next (&gsi))
        {
          check_stmt (&gsi, l, ipa);
          if (l->pure_const_state == IPA_NEITHER && l->looping && l->can_throw)
            goto end;
        }
    }

end:
  if (l->pure_const_state != IPA_NEITHER)
    {
      /* Const functions cannot have back edges (an indication of a
         possible infinite loop side effect).  */
      if (mark_dfs_back_edges ())
        {
          /* Preheaders are needed for SCEV to work.
             Simple latches and recorded exits improve the chances that the
             loop will be proved finite in testcases such as loop-15.c and
             loop-24.c.  */
          loop_optimizer_init (LOOPS_NORMAL
                               | LOOPS_HAVE_RECORDED_EXITS);
          if (dump_file && (dump_flags & TDF_DETAILS))
            flow_loops_dump (dump_file, NULL, 0);
          if (mark_irreducible_loops ())
            {
              if (dump_file)
                fprintf (dump_file, "    has irreducible loops\n");
              l->looping = true;
            }
          else
            {
              loop_iterator li;
              struct loop *loop;
              scev_initialize ();
              FOR_EACH_LOOP (li, loop, 0)
                if (!finite_loop_p (loop))
                  {
                    if (dump_file)
                      fprintf (dump_file, "    can not prove finiteness of loop %i\n", loop->num);
                    l->looping = true;
                    break;
                  }
              scev_finalize ();
            }
          loop_optimizer_finalize ();
        }
    }

  if (TREE_READONLY (decl))
    {
      l->pure_const_state = IPA_CONST;
      l->state_previously_known = IPA_CONST;
      if (!DECL_LOOPING_CONST_OR_PURE_P (decl))
        l->looping = false, l->looping_previously_known = false;
    }
  if (DECL_PURE_P (decl))
    {
      if (l->pure_const_state != IPA_CONST)
        l->pure_const_state = IPA_PURE;
      l->state_previously_known = IPA_PURE;
      if (!DECL_LOOPING_CONST_OR_PURE_P (decl))
        l->looping = false, l->looping_previously_known = false;
    }
  if (TREE_NOTHROW (decl))
    l->can_throw = false;

  pop_cfun ();
  current_function_decl = old_decl;
  if (dump_file)
    {
      if (l->looping)
        fprintf (dump_file, "Function is locally looping.\n");
      if (l->can_throw)
        fprintf (dump_file, "Function is locally throwing.\n");
      if (l->pure_const_state == IPA_CONST)
        fprintf (dump_file, "Function is locally const.\n");
      if (l->pure_const_state == IPA_PURE)
        fprintf (dump_file, "Function is locally pure.\n");
    }
  return l;
}

/* Called when a new function is inserted into the callgraph late.  */
static void
add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  if (cgraph_function_body_availability (node) < AVAIL_OVERWRITABLE)
    return;
  /* There are some shared nodes, in particular the initializers on
     static declarations.  We do not need to scan them more than once
     since all we would be interested in are the addressof
     operations.  */
  visited_nodes = pointer_set_create ();
  set_function_state (node, analyze_function (node, true));
  pointer_set_destroy (visited_nodes);
  visited_nodes = NULL;
}

/* Called when a new clone is inserted into the callgraph late.  */

static void
duplicate_node_data (struct cgraph_node *src, struct cgraph_node *dst,
                     void *data ATTRIBUTE_UNUSED)
{
  if (get_function_state (src))
    {
      funct_state l = XNEW (struct funct_state_d);
      gcc_assert (!get_function_state (dst));
      memcpy (l, get_function_state (src), sizeof (*l));
      set_function_state (dst, l);
    }
}

/* Called when a node is removed from the callgraph late.  */

static void
remove_node_data (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  if (get_function_state (node))
    {
      free (get_function_state (node));
      set_function_state (node, NULL);
    }
}


static void
register_hooks (void)
{
  static bool init_p = false;

  if (init_p)
    return;

  init_p = true;

  node_removal_hook_holder =
      cgraph_add_node_removal_hook (&remove_node_data, NULL);
  node_duplication_hook_holder =
      cgraph_add_node_duplication_hook (&duplicate_node_data, NULL);
  function_insertion_hook_holder =
      cgraph_add_function_insertion_hook (&add_new_function, NULL);
}


/* Analyze each function in the cgraph to see if it is locally PURE or
   CONST.  */

static void
generate_summary (void)
{
  struct cgraph_node *node;

  register_hooks ();

  /* There are some shared nodes, in particular the initializers on
     static declarations.  We do not need to scan them more than once
     since all we would be interested in are the addressof
     operations.  */
  visited_nodes = pointer_set_create ();

  /* Process all of the functions.

     We process AVAIL_OVERWRITABLE functions.  We cannot use the results
     by default, but the info can be used at LTO with -fwhole-program or
     when a function gets cloned and the clone is AVAILABLE.  */

  for (node = cgraph_nodes; node; node = node->next)
    if (cgraph_function_body_availability (node) >= AVAIL_OVERWRITABLE)
      set_function_state (node, analyze_function (node, true));

  pointer_set_destroy (visited_nodes);
  visited_nodes = NULL;
}


/* Serialize the ipa info for lto.  */

static void
pure_const_write_summary (cgraph_node_set set)
{
  struct cgraph_node *node;
  struct lto_simple_output_block *ob
    = lto_create_simple_output_block (LTO_section_ipa_pure_const);
  unsigned int count = 0;
  cgraph_node_set_iterator csi;

  for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
    {
      node = csi_node (csi);
      if (node->analyzed && get_function_state (node) != NULL)
        count++;
    }

  lto_output_uleb128_stream (ob->main_stream, count);

  /* Process all of the functions.  */
  for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
    {
      node = csi_node (csi);
      if (node->analyzed && get_function_state (node) != NULL)
        {
          struct bitpack_d *bp;
          funct_state fs;
          int node_ref;
          lto_cgraph_encoder_t encoder;

          fs = get_function_state (node);

          encoder = ob->decl_state->cgraph_node_encoder;
          node_ref = lto_cgraph_encoder_encode (encoder, node);
          lto_output_uleb128_stream (ob->main_stream, node_ref);

          /* Note that flags will need to be read in the opposite
             order as we are pushing the bitflags into FLAGS.  */
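          /* Illustrative note (not in the original): the five fields packed
             below occupy 2 + 2 + 1 + 1 + 1 = 7 bits per node, and the read
             side in pure_const_read_summary unpacks the same fields with
             the same widths.  */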
          bp = bitpack_create ();
          bp_pack_value (bp, fs->pure_const_state, 2);
          bp_pack_value (bp, fs->state_previously_known, 2);
          bp_pack_value (bp, fs->looping_previously_known, 1);
          bp_pack_value (bp, fs->looping, 1);
          bp_pack_value (bp, fs->can_throw, 1);
          lto_output_bitpack (ob->main_stream, bp);
          bitpack_delete (bp);
        }
    }

  lto_destroy_simple_output_block (ob);
}


/* Deserialize the ipa info for lto.  */

static void
pure_const_read_summary (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  register_hooks ();
  while ((file_data = file_data_vec[j++]))
    {
      const char *data;
      size_t len;
      struct lto_input_block *ib
        = lto_create_simple_input_block (file_data,
                                         LTO_section_ipa_pure_const,
                                         &data, &len);
      if (ib)
        {
          unsigned int i;
          unsigned int count = lto_input_uleb128 (ib);

          for (i = 0; i < count; i++)
            {
              unsigned int index;
              struct cgraph_node *node;
              struct bitpack_d *bp;
              funct_state fs;
              lto_cgraph_encoder_t encoder;

              fs = XCNEW (struct funct_state_d);
              index = lto_input_uleb128 (ib);
              encoder = file_data->cgraph_node_encoder;
              node = lto_cgraph_encoder_deref (encoder, index);
              set_function_state (node, fs);

              /* Note that the flags must be read in the opposite
                 order in which they were written (the bitflags were
                 pushed into FLAGS).  */
              bp = lto_input_bitpack (ib);
              fs->pure_const_state
                        = (enum pure_const_state_e) bp_unpack_value (bp, 2);
              fs->state_previously_known
                        = (enum pure_const_state_e) bp_unpack_value (bp, 2);
              fs->looping_previously_known = bp_unpack_value (bp, 1);
              fs->looping = bp_unpack_value (bp, 1);
              fs->can_throw = bp_unpack_value (bp, 1);
              bitpack_delete (bp);
            }

          lto_destroy_simple_input_block (file_data,
                                          LTO_section_ipa_pure_const,
                                          ib, data, len);
        }
    }
}


static bool
ignore_edge (struct cgraph_edge *e)
{
  return (!e->can_throw_external);
}

/* Return true if NODE is a self-recursive function.  */

static bool
self_recursive_p (struct cgraph_node *node)
{
  struct cgraph_edge *e;
  for (e = node->callees; e; e = e->next_callee)
    if (e->callee == node)
      return true;
  return false;
}

/* Produce the global information by performing a transitive closure
   on the local information that was produced by generate_summary.
   Note that there is no function_transform pass since this only
   updates the function_decl.  */

static unsigned int
propagate (void)
{
  struct cgraph_node *node;
  struct cgraph_node *w;
  struct cgraph_node **order =
    XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
  int order_pos;
  int i;
  struct ipa_dfs_info *w_info;

  cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
  cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
  cgraph_remove_node_removal_hook (node_removal_hook_holder);
  order_pos = ipa_utils_reduced_inorder (order, true, false, NULL);
  if (dump_file)
    {
      dump_cgraph (dump_file);
      ipa_utils_print_order (dump_file, "reduced", order, order_pos);
    }

  /* Propagate the local information through the call graph to produce
     the global information.  All the nodes within a cycle will have
     the same info so we collapse cycles first.  Then we can do the
     propagation in one pass from the leaves to the roots.  */
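  /* Illustrative note (not in the original): ipa_utils_reduced_inorder
     visits each strongly connected component once, so the loop below simply
     takes the worst (largest) lattice value seen over a cycle's members and
     everything they call.  */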
  for (i = 0; i < order_pos; i++)
    {
      enum pure_const_state_e pure_const_state = IPA_CONST;
      bool looping = false;
      int count = 0;
      node = order[i];

      /* Find the worst state for any node in the cycle.  */
      w = node;
      while (w)
        {
          struct cgraph_edge *e;
          funct_state w_l = get_function_state (w);
          if (pure_const_state < w_l->pure_const_state)
            pure_const_state = w_l->pure_const_state;

          if (w_l->looping)
            looping = true;
          if (cgraph_function_body_availability (w) == AVAIL_OVERWRITABLE)
            {
              looping |= w_l->looping_previously_known;
              if (pure_const_state < w_l->state_previously_known)
                pure_const_state = w_l->state_previously_known;
            }

          if (pure_const_state == IPA_NEITHER)
            break;

          count++;

          if (count > 1)
            looping = true;

          for (e = w->callees; e; e = e->next_callee)
            {
              struct cgraph_node *y = e->callee;

              if (cgraph_function_body_availability (y) > AVAIL_OVERWRITABLE)
                {
                  funct_state y_l = get_function_state (y);
                  if (pure_const_state < y_l->pure_const_state)
                    pure_const_state = y_l->pure_const_state;
                  if (pure_const_state == IPA_NEITHER)
                    break;
                  if (y_l->looping)
                    looping = true;
                }
              else
                {
                  int flags = flags_from_decl_or_type (y->decl);

                  if (flags & ECF_LOOPING_CONST_OR_PURE)
                    looping = true;
                  if (flags & ECF_CONST)
                    ;
                  else if ((flags & ECF_PURE) && pure_const_state == IPA_CONST)
                    pure_const_state = IPA_PURE;
                  else
                    pure_const_state = IPA_NEITHER, looping = true;

                }
            }
          w_info = (struct ipa_dfs_info *) w->aux;
          w = w_info->next_cycle;
        }

      /* Copy back the region's pure_const_state which is shared by
         all nodes in the region.  */
      w = node;
      while (w)
        {
          funct_state w_l = get_function_state (w);
          enum pure_const_state_e this_state = pure_const_state;
          bool this_looping = looping;

          if (w_l->state_previously_known != IPA_NEITHER
              && this_state > w_l->state_previously_known)
            this_state = w_l->state_previously_known;
          if (!this_looping && self_recursive_p (w))
            this_looping = true;
          if (!w_l->looping_previously_known)
            this_looping = false;

          /* All nodes within a cycle share the same info.  */
          w_l->pure_const_state = this_state;
          w_l->looping = this_looping;

          switch (this_state)
            {
            case IPA_CONST:
              if (!TREE_READONLY (w->decl) && dump_file)
                fprintf (dump_file, "Function found to be %sconst: %s\n",
                         this_looping ? "looping " : "",
                         cgraph_node_name (w));
              cgraph_set_readonly_flag (w, true);
              cgraph_set_looping_const_or_pure_flag (w, this_looping);
              break;

            case IPA_PURE:
              if (!DECL_PURE_P (w->decl) && dump_file)
                fprintf (dump_file, "Function found to be %spure: %s\n",
                         this_looping ? "looping " : "",
                         cgraph_node_name (w));
              cgraph_set_pure_flag (w, true);
              cgraph_set_looping_const_or_pure_flag (w, this_looping);
              break;

            default:
              break;
            }
          w_info = (struct ipa_dfs_info *) w->aux;
          w = w_info->next_cycle;
        }
    }

  /* Cleanup.  */
  for (node = cgraph_nodes; node; node = node->next)
    {
      /* Get rid of the aux information.  */
      if (node->aux)
        {
          w_info = (struct ipa_dfs_info *) node->aux;
          free (node->aux);
          node->aux = NULL;
        }
    }
  order_pos = ipa_utils_reduced_inorder (order, true, false, ignore_edge);
  if (dump_file)
    {
      dump_cgraph (dump_file);
      ipa_utils_print_order (dump_file, "reduced for nothrow", order, order_pos);
    }
  /* Propagate the local information through the call graph to produce
     the global information.  All the nodes within a cycle will have
     the same info so we collapse cycles first.  Then we can do the
     propagation in one pass from the leaves to the roots.  */
  for (i = 0; i < order_pos; i++)
    {
      bool can_throw = false;
      node = order[i];

      /* Find the worst state for any node in the cycle.  */
      w = node;
      while (w)
        {
          struct cgraph_edge *e;
          funct_state w_l = get_function_state (w);

          if (w_l->can_throw
              || cgraph_function_body_availability (w) == AVAIL_OVERWRITABLE)
            can_throw = true;

          if (can_throw)
            break;

          for (e = w->callees; e; e = e->next_callee)
            {
              struct cgraph_node *y = e->callee;

              if (cgraph_function_body_availability (y) > AVAIL_OVERWRITABLE)
                {
                  funct_state y_l = get_function_state (y);

                  if (can_throw)
                    break;
                  if (y_l->can_throw && !TREE_NOTHROW (w->decl)
                      && e->can_throw_external)
                    can_throw = true;
                }
              else if (e->can_throw_external && !TREE_NOTHROW (y->decl))
                can_throw = true;
            }
          w_info = (struct ipa_dfs_info *) w->aux;
          w = w_info->next_cycle;
        }

      /* Copy back the region's can_throw state, which is shared by
         all nodes in the region.  */
      w = node;
      while (w)
        {
          funct_state w_l = get_function_state (w);
          if (!can_throw && !TREE_NOTHROW (w->decl))
            {
              struct cgraph_edge *e;
              cgraph_set_nothrow_flag (w, true);
              for (e = w->callers; e; e = e->next_caller)
                e->can_throw_external = false;
              if (dump_file)
                fprintf (dump_file, "Function found to be nothrow: %s\n",
                         cgraph_node_name (w));
            }
          else if (can_throw && !TREE_NOTHROW (w->decl))
            w_l->can_throw = true;
          w_info = (struct ipa_dfs_info *) w->aux;
          w = w_info->next_cycle;
        }
    }

  /* Cleanup.  */
  for (node = cgraph_nodes; node; node = node->next)
    {
      /* Get rid of the aux information.  */
      if (node->aux)
        {
          w_info = (struct ipa_dfs_info *) node->aux;
          free (node->aux);
          node->aux = NULL;
        }
      if (cgraph_function_body_availability (node) >= AVAIL_OVERWRITABLE)
        free (get_function_state (node));
    }

  free (order);
  VEC_free (funct_state, heap, funct_state_vec);
  finish_state ();
  return 0;
}

static bool
gate_pure_const (void)
{
  return (flag_ipa_pure_const
          /* Don't bother doing anything if the program has errors.  */
          && !(errorcount || sorrycount));
}

struct ipa_opt_pass_d pass_ipa_pure_const =
{
 {
  IPA_PASS,
  "pure-const",                         /* name */
  gate_pure_const,                      /* gate */
  propagate,                            /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_IPA_PURE_CONST,                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 },
 generate_summary,                      /* generate_summary */
 pure_const_write_summary,              /* write_summary */
 pure_const_read_summary,               /* read_summary */
 NULL,                                  /* function_read_summary */
 NULL,                                  /* stmt_fixup */
 0,                                     /* TODOs */
 NULL,                                  /* function_transform */
 NULL                                   /* variable_transform */
};

/* Simple local pass for pure const discovery reusing the analysis from
   ipa_pure_const.  This pass is effective when executed together with
   other optimization passes in the early optimization pass queue.  */

static unsigned int
local_pure_const (void)
{
  bool changed = false;
  funct_state l;
  struct cgraph_node *node;

  /* Because we do not schedule pass_fixup_cfg over the whole program
     after early optimizations, we must not promote functions that are
     called by already processed functions.  */

  if (function_called_by_processed_nodes_p ())
    {
      if (dump_file)
        fprintf (dump_file, "Function called in recursive cycle; ignoring\n");
      return 0;
    }
  node = cgraph_node (current_function_decl);
  if (cgraph_function_body_availability (node) <= AVAIL_OVERWRITABLE)
    {
      if (dump_file)
        fprintf (dump_file, "Function has wrong visibility; ignoring\n");
      return 0;
    }

  l = analyze_function (node, false);

  switch (l->pure_const_state)
    {
    case IPA_CONST:
      if (!TREE_READONLY (current_function_decl))
        {
          cgraph_set_readonly_flag (node, true);
          cgraph_set_looping_const_or_pure_flag (node, l->looping);
          changed = true;
          if (dump_file)
            fprintf (dump_file, "Function found to be %sconst: %s\n",
                     l->looping ? "looping " : "",
                     lang_hooks.decl_printable_name (current_function_decl,
                                                     2));
        }
      else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
               && !l->looping)
        {
          cgraph_set_looping_const_or_pure_flag (node, false);
          changed = true;
          if (dump_file)
            fprintf (dump_file, "Function found to be non-looping: %s\n",
                     lang_hooks.decl_printable_name (current_function_decl,
                                                     2));
        }
      break;

    case IPA_PURE:
      if (!TREE_READONLY (current_function_decl))
        {
          cgraph_set_pure_flag (node, true);
          cgraph_set_looping_const_or_pure_flag (node, l->looping);
          changed = true;
          if (dump_file)
            fprintf (dump_file, "Function found to be %spure: %s\n",
                     l->looping ? "looping " : "",
                     lang_hooks.decl_printable_name (current_function_decl,
                                                     2));
        }
      else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
               && !l->looping)
        {
          cgraph_set_looping_const_or_pure_flag (node, false);
          changed = true;
          if (dump_file)
            fprintf (dump_file, "Function found to be non-looping: %s\n",
                     lang_hooks.decl_printable_name (current_function_decl,
                                                     2));
        }
      break;

    default:
      break;
    }
  if (!l->can_throw && !TREE_NOTHROW (current_function_decl))
    {
      struct cgraph_edge *e;

      cgraph_set_nothrow_flag (node, true);
      for (e = node->callers; e; e = e->next_caller)
        e->can_throw_external = false;
      changed = true;
      if (dump_file)
        fprintf (dump_file, "Function found to be nothrow: %s\n",
                 lang_hooks.decl_printable_name (current_function_decl,
                                                 2));
    }
  if (l)
    free (l);
  if (changed)
    return execute_fixup_cfg ();
  else
    return 0;
}

struct gimple_opt_pass pass_local_pure_const =
{
 {
  GIMPLE_PASS,
  "local-pure-const",                   /* name */
  gate_pure_const,                      /* gate */
  local_pure_const,                     /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_IPA_PURE_CONST,                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};
