openrisc/trunk/gnu-old/gcc-4.2.2/gcc/tree-optimize.c (rev 867)
Source: OpenCores openrisc Subversion repository, https://opencores.org/ocsvn/openrisc/openrisc/trunk

/* Top-level control of tree optimizations.
   Copyright 2001, 2002, 2003, 2004, 2005, 2007 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
 
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
#include "expr.h"
#include "diagnostic.h"
#include "basic-block.h"
#include "flags.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "timevar.h"
#include "function.h"
#include "langhooks.h"
#include "toplev.h"
#include "flags.h"
#include "cgraph.h"
#include "tree-inline.h"
#include "tree-mudflap.h"
#include "tree-pass.h"
#include "ggc.h"
#include "cgraph.h"
#include "graph.h"
#include "cfgloop.h"
#include "except.h"
 
/* Gate: execute, or not, all of the non-trivial optimizations.  */

static bool
gate_all_optimizations (void)
{
  return (optimize >= 1
          /* Don't bother doing anything if the program has errors.  */
          && !(errorcount || sorrycount));
}
 
struct tree_opt_pass pass_all_optimizations =
{
  NULL,                                 /* name */
  gate_all_optimizations,               /* gate */
  NULL,                                 /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0,                                    /* todo_flags_finish */
  0                                     /* letter */
};
 
struct tree_opt_pass pass_early_local_passes =
{
  NULL,                                 /* name */
  gate_all_optimizations,               /* gate */
  NULL,                                 /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0,                                    /* todo_flags_finish */
  0                                     /* letter */
};
 
/* Pass: cleanup the CFG just before expanding trees to RTL.
   This is just a round of label cleanups and case node grouping
   because after the tree optimizers have run such cleanups may
   be necessary.  */

static unsigned int
execute_cleanup_cfg_pre_ipa (void)
{
  cleanup_tree_cfg ();
  return 0;
}
 
struct tree_opt_pass pass_cleanup_cfg =
{
  "cleanup_cfg",                        /* name */
  NULL,                                 /* gate */
  execute_cleanup_cfg_pre_ipa,          /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  PROP_cfg,                             /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
  0                                     /* letter */
};
 
/* Pass: cleanup the CFG just before expanding trees to RTL.
   This is just a round of label cleanups and case node grouping
   because after the tree optimizers have run such cleanups may
   be necessary.  */

static unsigned int
execute_cleanup_cfg_post_optimizing (void)
{
  fold_cond_expr_cond ();
  cleanup_tree_cfg ();
  cleanup_dead_labels ();
  group_case_labels ();
  return 0;
}
 
struct tree_opt_pass pass_cleanup_cfg_post_optimizing =
{
  "final_cleanup",                      /* name */
  NULL,                                 /* gate */
  execute_cleanup_cfg_post_optimizing,  /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  PROP_cfg,                             /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
  0                                     /* letter */
};
 
/* Pass: do the actions required to finish with tree-ssa optimization
   passes.  */

static unsigned int
execute_free_datastructures (void)
{
  /* ??? This isn't the right place for this.  Worse, it got computed
     more or less at random in various passes.  */
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  /* Remove the ssa structures.  Do it here since this includes statement
     annotations that need to be intact during disband_implicit_edges.  */
  delete_tree_ssa ();
  return 0;
}
 
struct tree_opt_pass pass_free_datastructures =
{
  NULL,                                 /* name */
  NULL,                                 /* gate */
  execute_free_datastructures,          /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  PROP_cfg,                             /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0,                                    /* todo_flags_finish */
  0                                     /* letter */
};
/* Pass: free cfg annotations.  */

static unsigned int
execute_free_cfg_annotations (void)
{
  basic_block bb;
  block_stmt_iterator bsi;

  /* Emit gotos for implicit jumps.  */
  disband_implicit_edges ();

  /* Remove annotations from every tree in the function.  */
  FOR_EACH_BB (bb)
    for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
      {
        tree stmt = bsi_stmt (bsi);
        ggc_free (stmt->common.ann);
        stmt->common.ann = NULL;
      }

  /* And get rid of annotations we no longer need.  */
  delete_tree_cfg_annotations ();

#ifdef ENABLE_CHECKING
  /* Once the statement annotations have been removed, we can verify
     the integrity of statements in the EH throw table.  */
  verify_eh_throw_table_statements ();
#endif
  return 0;
}
 
struct tree_opt_pass pass_free_cfg_annotations =
{
  NULL,                                 /* name */
  NULL,                                 /* gate */
  execute_free_cfg_annotations,         /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  PROP_cfg,                             /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0,                                    /* todo_flags_finish */
  0                                     /* letter */
};
 
/* Return true if BB has at least one abnormal outgoing edge.  */

static inline bool
has_abnormal_outgoing_edge_p (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (e->flags & EDGE_ABNORMAL)
      return true;

  return false;
}
 
/* Pass: fixup_cfg.  IPA passes, compilation of earlier functions or inlining
   might have changed some properties, such as marked functions nothrow or
   added calls that can potentially go to non-local labels.  Remove redundant
   edges and basic blocks, and create new ones if necessary.  */

static unsigned int
execute_fixup_cfg (void)
{
  basic_block bb;
  block_stmt_iterator bsi;

  if (cfun->eh)
    FOR_EACH_BB (bb)
      {
        for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
          {
            tree stmt = bsi_stmt (bsi);
            tree call = get_call_expr_in (stmt);

            if (call && call_expr_flags (call) & (ECF_CONST | ECF_PURE))
              TREE_SIDE_EFFECTS (call) = 0;
            if (!tree_could_throw_p (stmt) && lookup_stmt_eh_region (stmt))
              remove_stmt_from_eh_region (stmt);
          }
        tree_purge_dead_eh_edges (bb);
      }

  if (current_function_has_nonlocal_label)
    FOR_EACH_BB (bb)
      {
        for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
          {
            tree stmt = bsi_stmt (bsi);
            if (tree_can_make_abnormal_goto (stmt))
              {
                if (stmt == bsi_stmt (bsi_last (bb)))
                  {
                    if (!has_abnormal_outgoing_edge_p (bb))
                      make_abnormal_goto_edges (bb, true);
                  }
                else
                  {
                    edge e = split_block (bb, stmt);
                    bb = e->src;
                    make_abnormal_goto_edges (bb, true);
                  }
                break;
              }
          }
      }

  cleanup_tree_cfg ();

  /* Dump a textual representation of the flowgraph.  */
  if (dump_file)
    dump_tree_cfg (dump_file, dump_flags);

  return 0;
}
 
struct tree_opt_pass pass_fixup_cfg =
{
  "fixupcfg",                           /* name */
  NULL,                                 /* gate */
  execute_fixup_cfg,                    /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  PROP_cfg,                             /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0,                                    /* todo_flags_finish */
  0                                     /* letter */
};
 
/* Do the actions required to initialize internal data structures used
   in tree-ssa optimization passes.  */

static unsigned int
execute_init_datastructures (void)
{
  /* Allocate hash tables, arrays and other structures.  */
  init_tree_ssa ();
  return 0;
}
 
struct tree_opt_pass pass_init_datastructures =
{
  NULL,                                 /* name */
  NULL,                                 /* gate */
  execute_init_datastructures,          /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  PROP_cfg,                             /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0,                                    /* todo_flags_finish */
  0                                     /* letter */
};
 
/* Run the tree lowering pass list over the body of FN.  */

void
tree_lowering_passes (tree fn)
{
  tree saved_current_function_decl = current_function_decl;

  current_function_decl = fn;
  push_cfun (DECL_STRUCT_FUNCTION (fn));
  tree_register_cfg_hooks ();
  bitmap_obstack_initialize (NULL);
  execute_pass_list (all_lowering_passes);
  free_dominance_info (CDI_POST_DOMINATORS);
  compact_blocks ();
  current_function_decl = saved_current_function_decl;
  bitmap_obstack_release (NULL);
  pop_cfun ();
}
 
/* Update recursively all inlined_to pointers of functions
   inlined into NODE to INLINED_TO.  */
static void
update_inlined_to_pointers (struct cgraph_node *node,
                            struct cgraph_node *inlined_to)
{
  struct cgraph_edge *e;
  for (e = node->callees; e; e = e->next_callee)
    {
      if (e->callee->global.inlined_to)
        {
          e->callee->global.inlined_to = inlined_to;
          update_inlined_to_pointers (e->callee, inlined_to);
        }
    }
}
 
/* For functions-as-trees languages, this performs all optimization and
   compilation for FNDECL.  */

void
tree_rest_of_compilation (tree fndecl)
{
  location_t saved_loc;
  struct cgraph_node *node;

  timevar_push (TV_EXPAND);

  gcc_assert (!flag_unit_at_a_time || cgraph_global_info_ready);

  node = cgraph_node (fndecl);

  /* We might need the body of this function so that we can expand
     it inline somewhere else.  */
  if (cgraph_preserve_function_body_p (fndecl))
    save_inline_function_body (node);

  /* Initialize the RTL code for the function.  */
  current_function_decl = fndecl;
  saved_loc = input_location;
  input_location = DECL_SOURCE_LOCATION (fndecl);
  init_function_start (fndecl);

  /* Even though we're inside a function body, we still don't want to
     call expand_expr to calculate the size of a variable-sized array.
     We haven't necessarily assigned RTL to all variables yet, so it's
     not safe to try to expand expressions involving them.  */
  cfun->x_dont_save_pending_sizes_p = 1;
  cfun->after_inlining = true;

  if (flag_inline_trees)
    {
      struct cgraph_edge *e;
      for (e = node->callees; e; e = e->next_callee)
        if (!e->inline_failed || warn_inline)
          break;
      if (e)
        {
          timevar_push (TV_INTEGRATION);
          optimize_inline_calls (fndecl);
          timevar_pop (TV_INTEGRATION);
        }
    }
  /* In non-unit-at-a-time we must mark all referenced functions as needed.  */
  if (!flag_unit_at_a_time)
    {
      struct cgraph_edge *e;
      for (e = node->callees; e; e = e->next_callee)
        if (e->callee->analyzed)
          cgraph_mark_needed_node (e->callee);
    }
 
  /* We are not going to maintain the cgraph edges up to date.
     Kill it so it won't confuse us.  */
  cgraph_node_remove_callees (node);

  /* Initialize the default bitmap obstack.  */
  bitmap_obstack_initialize (NULL);
  bitmap_obstack_initialize (&reg_obstack); /* FIXME, only at RTL generation */

  tree_register_cfg_hooks ();
  /* Perform all tree transforms and optimizations.  */
  execute_pass_list (all_passes);

  bitmap_obstack_release (&reg_obstack);

  /* Release the default bitmap obstack.  */
  bitmap_obstack_release (NULL);

  DECL_SAVED_TREE (fndecl) = NULL;
  cfun = 0;

  /* If requested, warn about function definitions where the function will
     return a value (usually of some struct or union type) which itself will
     take up a lot of stack space.  */
  if (warn_larger_than && !DECL_EXTERNAL (fndecl) && TREE_TYPE (fndecl))
    {
      tree ret_type = TREE_TYPE (TREE_TYPE (fndecl));

      if (ret_type && TYPE_SIZE_UNIT (ret_type)
          && TREE_CODE (TYPE_SIZE_UNIT (ret_type)) == INTEGER_CST
          && 0 < compare_tree_int (TYPE_SIZE_UNIT (ret_type),
                                   larger_than_size))
        {
          unsigned int size_as_int
            = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (ret_type));

          if (compare_tree_int (TYPE_SIZE_UNIT (ret_type), size_as_int) == 0)
            warning (0, "size of return value of %q+D is %u bytes",
                     fndecl, size_as_int);
          else
            warning (0, "size of return value of %q+D is larger than %wd bytes",
                     fndecl, larger_than_size);
        }
    }

  if (!flag_inline_trees)
    {
      DECL_SAVED_TREE (fndecl) = NULL;
      if (DECL_STRUCT_FUNCTION (fndecl) == 0
          && !cgraph_node (fndecl)->origin)
        {
          /* Stop pointing to the local nodes about to be freed.
             But DECL_INITIAL must remain nonzero so we know this
             was an actual function definition.
             For a nested function, this is done in c_pop_function_context.
             If rest_of_compilation set this to 0, leave it 0.  */
          if (DECL_INITIAL (fndecl) != 0)
            DECL_INITIAL (fndecl) = error_mark_node;
        }
    }

  input_location = saved_loc;

  ggc_collect ();
  timevar_pop (TV_EXPAND);
}
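
Editor's addendum, not part of tree-optimize.c: the file above repeats one pattern many times, a gate predicate that decides whether a pass runs, an execute callback that returns extra TODO flags, and a positionally initialized tree_opt_pass descriptor whose fields are documented by the trailing comments. The standalone sketch below only illustrates that pattern; it mirrors just the first three fields of the real struct, and every name in it (demo_opt_pass, pass_demo, gate_demo, execute_demo) is hypothetical rather than taken from GCC.

/* Minimal, compilable illustration of the pass-descriptor pattern above.  */
#include <stdbool.h>
#include <stdio.h>

/* Cut-down stand-in for the leading fields of GCC's tree_opt_pass.  */
struct demo_opt_pass
{
  const char *name;                     /* name (NULL for anonymous container passes) */
  bool (*gate) (void);                  /* gate: should the pass run at all?  */
  unsigned int (*execute) (void);       /* execute: do the work, return TODO flags */
};

/* Stand-in for GCC's global `optimize' level.  */
static int optimize = 1;

/* Same shape as gate_all_optimizations: only run when optimizing.  */
static bool
gate_demo (void)
{
  return optimize >= 1;
}

static unsigned int
execute_demo (void)
{
  printf ("demo pass executed\n");
  return 0;                             /* no extra TODO flags requested */
}

static struct demo_opt_pass pass_demo =
{
  "demo",                               /* name */
  gate_demo,                            /* gate */
  execute_demo                          /* execute */
};

int
main (void)
{
  /* A pass manager in miniature: consult the gate, then call execute,
     much as GCC's pass list execution does for the descriptors above.  */
  if (pass_demo.gate == NULL || pass_demo.gate ())
    pass_demo.execute ();
  return 0;
}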
