/* gnu-stable/gcc-4.5.1/gcc/tree-ssa-dse.c, from the OpenCores openrisc trunk
   Subversion repository (rev 841).  */
/* Dead store elimination
   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "ggc.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "basic-block.h"
#include "timevar.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-dump.h"
#include "domwalk.h"
#include "flags.h"
#include "langhooks.h"

/* This file implements dead store elimination.

   A dead store is a store into a memory location which will later be
   overwritten by another store without any intervening loads.  In this
   case the earlier store can be deleted.

   In our SSA + virtual operand world we use immediate uses of virtual
   operands to detect dead stores.  If a store's virtual definition
   is used precisely once by a later store to the same location which
   post dominates the first store, then the first store is dead.

   The single use of the store's virtual definition ensures that
   there are no intervening aliased loads and the requirement that
   the second store post dominate the first ensures that if the earlier
   store executes, then the later stores will execute before the function
   exits.

   It may help to think of this as first moving the earlier store to
   the point immediately before the later store.  Again, the single
   use of the virtual definition and the post-dominance relationship
   ensure that such movement would be safe.  Clearly if there are
   back to back stores, then the second is redundant.

   Reviewing section 10.7.2 in Morgan's "Building an Optimizing Compiler"
   may also help in understanding this code since it discusses the
   relationship between dead store and redundant load elimination.  In
   fact, they are the same transformation applied to different views of
   the CFG.  */
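
/* As a minimal illustration (a made-up example, not taken from GCC's
   testsuite), consider:

     void
     f (int *p)
     {
       *p = 1;   // dead: overwritten below, no intervening load
       *p = 2;   // post-dominates and clobbers the store above
     }

   The virtual definition of the first store has exactly one use, the
   second store, which writes the same location and post-dominates the
   first, so the first store can be deleted.  */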


struct dse_global_data
{
  /* This is the global bitmap for store statements.

     Each statement has a unique ID.  When we encounter a store statement
     that we want to record, set the bit corresponding to the statement's
     unique ID in this bitmap.  */
  bitmap stores;
};

/* We allocate a bitmap-per-block for stores which are encountered
   during the scan of that block.  This allows us to restore the
   global bitmap of stores when we finish processing a block.  */
struct dse_block_local_data
{
  bitmap stores;
};
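
/* An informal sketch of how the two bitmaps interact during the
   post-dominator walk (see dse_enter_block and dse_leave_block below):

     entering block BB:
       for each store (and virtual PHI) S in BB:
         set S's uid in both dse_gd->stores and bd->stores;
     leaving block BB:
       for each uid recorded in bd->stores:
         clear that uid in dse_gd->stores;

   so at any point the global bitmap describes only stores in the
   current block and in blocks that post-dominate it.  */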

static bool gate_dse (void);
static unsigned int tree_ssa_dse (void);
static void dse_initialize_block_local_data (struct dom_walk_data *,
                                             basic_block,
                                             bool);
static void dse_enter_block (struct dom_walk_data *, basic_block);
static void dse_leave_block (struct dom_walk_data *, basic_block);
static void record_voperand_set (bitmap, bitmap *, unsigned int);

/* Returns uid of statement STMT.  */

static unsigned
get_stmt_uid (gimple stmt)
{
  if (gimple_code (stmt) == GIMPLE_PHI)
    return SSA_NAME_VERSION (gimple_phi_result (stmt))
           + gimple_stmt_max_uid (cfun);

  return gimple_uid (stmt);
}
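
/* For example (with made-up numbers): if gimple_stmt_max_uid (cfun) is
   100, an ordinary statement simply keeps its uid, say 42, while a
   virtual PHI whose result has SSA version 7 maps to 107.  Statement
   uids are always smaller than gimple_stmt_max_uid (cfun), so the two
   ranges cannot collide.  */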

/* Set bit UID in bitmaps GLOBAL and *LOCAL, creating *LOCAL as needed.  */

static void
record_voperand_set (bitmap global, bitmap *local, unsigned int uid)
{
  /* Lazily allocate the bitmap.  Note that we do not get a notification
     when the block local data structures die, so we allocate the local
     bitmap backed by the GC system.  */
  if (*local == NULL)
    *local = BITMAP_GGC_ALLOC ();

  /* Set the bit in the local and global bitmaps.  */
  bitmap_set_bit (*local, uid);
  bitmap_set_bit (global, uid);
}

/* Initialize block local data structures.  */

static void
dse_initialize_block_local_data (struct dom_walk_data *walk_data,
                                 basic_block bb ATTRIBUTE_UNUSED,
                                 bool recycled)
{
  struct dse_block_local_data *bd
    = (struct dse_block_local_data *)
        VEC_last (void_p, walk_data->block_data_stack);

  /* If we are given a recycled block local data structure, ensure any
     bitmap associated with the block is cleared.  */
  if (recycled)
    {
      if (bd->stores)
        bitmap_clear (bd->stores);
    }
}

/* A helper of dse_optimize_stmt.
   Given a GIMPLE_ASSIGN in STMT, find a candidate statement *USE_STMT that
   may prove STMT to be dead.
   Return TRUE if the above conditions are met, otherwise FALSE.  */

static bool
dse_possible_dead_store_p (gimple stmt, gimple *use_stmt)
{
  gimple temp;
  unsigned cnt = 0;

  *use_stmt = NULL;

  /* Find the first dominated statement that clobbers (part of) the
     memory stmt stores to with no intermediate statement that may use
     part of the memory stmt stores.  That is, find a store that may
     prove stmt to be a dead store.  */
  temp = stmt;
  do
    {
      gimple use_stmt;
      imm_use_iterator ui;
      bool fail = false;
      tree defvar;

      /* Limit stmt walking to be linear in the number of possibly
         dead stores.  */
      if (++cnt > 256)
        return false;

      if (gimple_code (temp) == GIMPLE_PHI)
        defvar = PHI_RESULT (temp);
      else
        defvar = gimple_vdef (temp);
      temp = NULL;
      FOR_EACH_IMM_USE_STMT (use_stmt, ui, defvar)
        {
          cnt++;

          /* If we ever reach our DSE candidate stmt again fail.  We
             cannot handle dead stores in loops.  */
          if (use_stmt == stmt)
            {
              fail = true;
              BREAK_FROM_IMM_USE_STMT (ui);
            }
          /* In simple cases we can look through PHI nodes, but we
             have to be careful with loops and with memory references
             containing operands that are also operands of PHI nodes.
             See gcc.c-torture/execute/20051110-*.c.  */
          else if (gimple_code (use_stmt) == GIMPLE_PHI)
            {
              if (temp
                  /* Make sure we are not in a loop latch block.  */
                  || gimple_bb (stmt) == gimple_bb (use_stmt)
                  || dominated_by_p (CDI_DOMINATORS,
                                     gimple_bb (stmt), gimple_bb (use_stmt))
                  /* We can look through PHIs to regions post-dominating
                     the DSE candidate stmt.  */
                  || !dominated_by_p (CDI_POST_DOMINATORS,
                                      gimple_bb (stmt), gimple_bb (use_stmt)))
                {
                  fail = true;
                  BREAK_FROM_IMM_USE_STMT (ui);
                }
              temp = use_stmt;
            }
          /* If the statement is a use the store is not dead.  */
          else if (ref_maybe_used_by_stmt_p (use_stmt,
                                             gimple_assign_lhs (stmt)))
            {
              fail = true;
              BREAK_FROM_IMM_USE_STMT (ui);
            }
          /* If this is a store, remember it or bail out if we have
             multiple ones (they will be in different CFG parts then).  */
          else if (gimple_vdef (use_stmt))
            {
              if (temp)
                {
                  fail = true;
                  BREAK_FROM_IMM_USE_STMT (ui);
                }
              temp = use_stmt;
            }
        }

      if (fail)
        return false;

      /* If we didn't find any definition, this means the store is dead
         if it isn't a store to globally reachable memory.  In this case
         just pretend the stmt makes itself dead.  Otherwise fail.  */
      if (!temp)
        {
          if (is_hidden_global_store (stmt))
            return false;

          temp = stmt;
          break;
        }
    }
  /* We deliberately stop on clobbering statements and not only on
     killing ones to make walking cheaper.  Otherwise we can just
     continue walking until both stores have equal reference trees.  */
  while (!stmt_may_clobber_ref_p (temp, gimple_assign_lhs (stmt)));

  if (!is_gimple_assign (temp))
    return false;

  *use_stmt = temp;

  return true;
}
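
/* An informal illustration of the walk above (a made-up example, not
   taken from GCC's testsuite):

     struct s { int x; int y; };

     void
     g (struct s *p, int b)
     {
       if (b)
         p->x = 1;   // candidate: its virtual definition is used only
                     // by the virtual PHI at the join below
       else
         p->y = 2;   // kept: no later store clobbers p->y
       p->x = 3;     // post-dominates the candidate and clobbers p->x
     }

   Starting from the store of 1, the only immediate use of its virtual
   definition is the PHI at the join; the join block post-dominates the
   store, so the walk continues through the PHI and stops at the store
   of 3, which clobbers the same location, proving the store of 1 dead.  */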


/* Attempt to eliminate dead stores in the statement referenced by GSI.

   A dead store is a store into a memory location which will later be
   overwritten by another store without any intervening loads.  In this
   case the earlier store can be deleted.

   In our SSA + virtual operand world we use immediate uses of virtual
   operands to detect dead stores.  If a store's virtual definition
   is used precisely once by a later store to the same location which
   post dominates the first store, then the first store is dead.  */

static void
dse_optimize_stmt (struct dse_global_data *dse_gd,
                   struct dse_block_local_data *bd,
                   gimple_stmt_iterator gsi)
{
  gimple stmt = gsi_stmt (gsi);

  /* If this statement has no virtual defs, then there is nothing
     to do.  */
  if (!gimple_vdef (stmt))
    return;

  /* We know we have virtual definitions.  If this is a GIMPLE_ASSIGN
     that's not also a function call, then record it into our table.  */
  if (is_gimple_call (stmt) && gimple_call_fndecl (stmt))
    return;

  if (gimple_has_volatile_ops (stmt))
    return;

  if (is_gimple_assign (stmt))
    {
      gimple use_stmt;

      record_voperand_set (dse_gd->stores, &bd->stores, gimple_uid (stmt));

      if (!dse_possible_dead_store_p (stmt, &use_stmt))
        return;

      /* If we have precisely one immediate use at this point and the
         stores are to the same memory location or there is a chain of
         virtual uses from stmt and the stmt which stores to that same
         memory location, then we may have found a redundant store.  */
      if (bitmap_bit_p (dse_gd->stores, get_stmt_uid (use_stmt))
          && operand_equal_p (gimple_assign_lhs (stmt),
                              gimple_assign_lhs (use_stmt), 0))
        {
          /* If use_stmt is or might be a nop assignment, e.g. for
             struct { ... } S a, b, *p; ...
             b = a; b = b;
             or
             b = a; b = *p; where p might be &b,
             or
             *p = a; *p = b; where p might be &b,
             or
             *p = *u; *p = *v; where p might be v, then USE_STMT
             acts as a use as well as definition, so store in STMT
             is not dead.  */
          if (stmt != use_stmt
              && !is_gimple_reg (gimple_assign_rhs1 (use_stmt))
              && !is_gimple_min_invariant (gimple_assign_rhs1 (use_stmt))
              /* ???  Should {} be invariant?  */
              && gimple_assign_rhs_code (use_stmt) != CONSTRUCTOR
              && refs_may_alias_p (gimple_assign_lhs (use_stmt),
                                   gimple_assign_rhs1 (use_stmt)))
            return;

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "  Deleted dead store '");
              print_gimple_stmt (dump_file, gsi_stmt (gsi), dump_flags, 0);
              fprintf (dump_file, "'\n");
            }

          /* Then we need to fix the operand of the consuming stmt.  */
          unlink_stmt_vdef (stmt);

          /* Remove the dead store.  */
          gsi_remove (&gsi, true);

          /* And release any SSA_NAMEs set in this statement back to the
             SSA_NAME manager.  */
          release_defs (stmt);
        }
    }
}
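
/* To expand on one of the aliasing cases guarded against above (an
   illustrative, made-up example):

     struct S { int a[4]; };
     struct S b, x;

     void
     h (struct S *p)
     {
       b = x;    // looks dead: b is stored again immediately below
       b = *p;   // but if p == &b, this reads the value stored above
     }

   When p aliases &b the second assignment copies the value written by
   the first, so deleting the first store would change the result; the
   refs_may_alias_p check keeps such stores.  */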

/* Record that we have seen the PHIs at the start of BB which correspond
   to virtual operands.  */
static void
dse_record_phi (struct dse_global_data *dse_gd,
                struct dse_block_local_data *bd,
                gimple phi)
{
  if (!is_gimple_reg (gimple_phi_result (phi)))
    record_voperand_set (dse_gd->stores, &bd->stores, get_stmt_uid (phi));
}

static void
dse_enter_block (struct dom_walk_data *walk_data, basic_block bb)
{
  struct dse_block_local_data *bd
    = (struct dse_block_local_data *)
        VEC_last (void_p, walk_data->block_data_stack);
  struct dse_global_data *dse_gd
    = (struct dse_global_data *) walk_data->global_data;
  gimple_stmt_iterator gsi;

  for (gsi = gsi_last (bb_seq (bb)); !gsi_end_p (gsi); gsi_prev (&gsi))
    dse_optimize_stmt (dse_gd, bd, gsi);
  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    dse_record_phi (dse_gd, bd, gsi_stmt (gsi));
}

static void
dse_leave_block (struct dom_walk_data *walk_data,
                 basic_block bb ATTRIBUTE_UNUSED)
{
  struct dse_block_local_data *bd
    = (struct dse_block_local_data *)
        VEC_last (void_p, walk_data->block_data_stack);
  struct dse_global_data *dse_gd
    = (struct dse_global_data *) walk_data->global_data;
  bitmap stores = dse_gd->stores;
  unsigned int i;
  bitmap_iterator bi;

  /* Unwind the stores noted in this basic block.  */
  if (bd->stores)
    EXECUTE_IF_SET_IN_BITMAP (bd->stores, 0, i, bi)
      {
        bitmap_clear_bit (stores, i);
      }
}

/* Main entry point.  */

static unsigned int
tree_ssa_dse (void)
{
  struct dom_walk_data walk_data;
  struct dse_global_data dse_gd;

  renumber_gimple_stmt_uids ();

  /* We might consider making this a property of each pass so that it
     can be [re]computed on an as-needed basis.  Particularly since
     this pass could be seen as an extension of DCE which needs post
     dominators.  */
  calculate_dominance_info (CDI_POST_DOMINATORS);
  calculate_dominance_info (CDI_DOMINATORS);

  /* Dead store elimination is fundamentally a walk of the post-dominator
     tree and a backwards walk of statements within each block.  */
  walk_data.dom_direction = CDI_POST_DOMINATORS;
  walk_data.initialize_block_local_data = dse_initialize_block_local_data;
  walk_data.before_dom_children = dse_enter_block;
  walk_data.after_dom_children = dse_leave_block;

  walk_data.block_local_data_size = sizeof (struct dse_block_local_data);

  /* This is the main bitmap for the dead store elimination pass.  */
  dse_gd.stores = BITMAP_ALLOC (NULL);
  walk_data.global_data = &dse_gd;

  /* Initialize the dominator walker.  */
  init_walk_dominator_tree (&walk_data);

  /* Recursively walk the dominator tree.  */
  walk_dominator_tree (&walk_data, EXIT_BLOCK_PTR);

  /* Finalize the dominator walker.  */
  fini_walk_dominator_tree (&walk_data);

  /* Release the main bitmap.  */
  BITMAP_FREE (dse_gd.stores);

  /* For now, just wipe the post-dominator information.  */
  free_dominance_info (CDI_POST_DOMINATORS);
  return 0;
}

static bool
gate_dse (void)
{
  return flag_tree_dse != 0;
}

struct gimple_opt_pass pass_dse =
{
 {
  GIMPLE_PASS,
  "dse",                        /* name */
  gate_dse,                     /* gate */
  tree_ssa_dse,                 /* execute */
  NULL,                         /* sub */
  NULL,                         /* next */
  0,                             /* static_pass_number */
  TV_TREE_DSE,                  /* tv_id */
  PROP_cfg | PROP_ssa,          /* properties_required */
  0,                             /* properties_provided */
  0,                             /* properties_destroyed */
  0,                             /* todo_flags_start */
  TODO_dump_func
    | TODO_ggc_collect
    | TODO_verify_ssa           /* todo_flags_finish */
 }
};
