openrisc/trunk/gnu-src/gcc-4.5.1/gcc/tree-ssa-operands.c (OpenCores openrisc_me Subversion repository, rev 280, jeremybenn)
/* SSA operands management for trees.
   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "ggc.h"
#include "timevar.h"
#include "toplev.h"
#include "langhooks.h"
#include "ipa-reference.h"

/* This file contains the code required to manage the operands cache of the
   SSA optimizer.  For every stmt, we maintain an operand cache in the stmt
   annotation.  This cache contains operands that will be of interest to
   optimizers and other passes wishing to manipulate the IL.

   The operand types are broken up into REAL and VIRTUAL operands.  The real
   operands are represented as pointers into the stmt's operand tree.  Thus
   any manipulation of the real operands will be reflected in the actual tree.
   Virtual operands are represented solely in the cache, although the base
   variable for the SSA_NAME may or may not occur in the stmt's tree.
   Manipulation of the virtual operands will not be reflected in the stmt tree.

   The routines in this file are concerned with creating this operand cache
   from a stmt tree.

   The operand tree is then parsed by the various get_* routines, which look
   through the stmt tree for the occurrence of operands which may be of
   interest, and calls are made to the append_* routines whenever one is
   found.  There are 4 of these routines, each representing one of the
   4 types of operands: Defs, Uses, Virtual Uses, and Virtual May Defs.

   The append_* routines check for duplication, and simply keep a list of
   unique objects for each operand type in the build_* extendable vectors.

   Once the stmt tree is completely parsed, the finalize_ssa_stmt_operands()
   routine is called, which proceeds to perform the finalization routine
   on each of the 4 operand vectors which have been built up.

   If the stmt had a previous operand cache, the finalization routines
   attempt to match up the new operands with the old ones.  If it's a perfect
   match, the old vector is simply reused.  If it isn't a perfect match, then
   a new vector is created and the new operands are placed there.  For
   virtual operands, if the previous cache had an SSA_NAME version of a
   variable, and that same variable occurs in the new operand cache, then
   the new cache vector will also get the same SSA_NAME.

   I.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new
   operand vector for VUSE, then the new vector will also be modified
   such that it contains 'a_5' rather than 'a'.  */
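
/* As a schematic illustration, for a statement like

       a_1 = b_2 + c_3;

   parsing appends the address of the LHS operand to build_defs and the
   addresses of the two RHS operands to build_uses; the finalize_*
   routines below then turn those pointers into the stmt's def and use
   operand lists.  For a store through a pointer,

       *p_4 = x_5;

   p_4 and x_5 become real uses, while the memory side effect is
   represented by a VDEF (with an implied VUSE) of the artificial .MEM
   variable created by create_vop_var below.  */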

/* Structure storing statistics on how many call clobbers we have, and
   how many were avoided.  */

static struct
{
  /* Number of call-clobbered ops we attempt to add to calls in
     add_call_clobbered_mem_symbols.  */
  unsigned int clobbered_vars;

  /* Number of write-clobbers (VDEFs) avoided by using
     not_written information.  */
  unsigned int static_write_clobbers_avoided;

  /* Number of reads (VUSEs) avoided by using not_read information.  */
  unsigned int static_read_clobbers_avoided;

  /* Number of write-clobbers avoided because the variable can't escape to
     this call.  */
  unsigned int unescapable_clobbers_avoided;

  /* Number of read-only uses we attempt to add to calls in
     add_call_read_mem_symbols.  */
  unsigned int readonly_clobbers;

  /* Number of read-only uses we avoid using not_read information.  */
  unsigned int static_readonly_clobbers_avoided;
} clobber_stats;


/* Flags to describe operand properties in helpers.  */

/* By default, operands are loaded.  */
#define opf_use         0

/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
#define opf_def         (1 << 0)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops     (1 << 1)

/* Operand is an implicit reference.  This is used to distinguish
   explicit assignments in the form of MODIFY_EXPR from
   clobbering sites like function calls or ASM_EXPRs.  */
#define opf_implicit    (1 << 2)
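
/* Schematically, for a GIMPLE_ASSIGN or GIMPLE_CALL the walk is started
   by parse_ssa_operands below as

       get_expr_operands (stmt, gimple_op_ptr (stmt, 0), opf_def);
       get_expr_operands (stmt, gimple_op_ptr (stmt, i), opf_use);

   and the recursive cases OR additional flags in as needed (e.g. the
   ADDR_EXPR case in get_expr_operands adds opf_no_vops).  */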

/* Array for building all the def operands.  */
static VEC(tree,heap) *build_defs;

/* Array for building all the use operands.  */
static VEC(tree,heap) *build_uses;

/* The built VDEF operand.  */
static tree build_vdef;

/* The built VUSE operand.  */
static tree build_vuse;

/* Bitmap obstack for our data structures that need to survive across
   compilations of multiple functions.  */
static bitmap_obstack operands_bitmap_obstack;

static void get_expr_operands (gimple, tree *, int);

/* Number of functions with initialized ssa_operands.  */
static int n_initialized = 0;

/* Return the DECL_UID of the base variable of T.  */

static inline unsigned
get_name_decl (const_tree t)
{
  if (TREE_CODE (t) != SSA_NAME)
    return DECL_UID (t);
  else
    return DECL_UID (SSA_NAME_VAR (t));
}


/* Return true if the SSA operands cache is active.  */

bool
ssa_operands_active (void)
{
  /* This function may be invoked from contexts where CFUN is NULL
     (IPA passes); return false for now.  FIXME: operands may be
     active in each individual function, maybe this function should
     take CFUN as a parameter.  */
  if (cfun == NULL)
    return false;

  return cfun->gimple_df && gimple_ssa_operands (cfun)->ops_active;
}


/* Create the VOP variable, an artificial global variable to act as a
   representative of all of the virtual operands' FUD chain.  */

static void
create_vop_var (void)
{
  tree global_var;

  gcc_assert (cfun->gimple_df->vop == NULL_TREE);

  global_var = build_decl (BUILTINS_LOCATION, VAR_DECL,
                           get_identifier (".MEM"),
                           void_type_node);
  DECL_ARTIFICIAL (global_var) = 1;
  TREE_READONLY (global_var) = 0;
  DECL_EXTERNAL (global_var) = 1;
  TREE_STATIC (global_var) = 1;
  TREE_USED (global_var) = 1;
  DECL_CONTEXT (global_var) = NULL_TREE;
  TREE_THIS_VOLATILE (global_var) = 0;
  TREE_ADDRESSABLE (global_var) = 0;

  create_var_ann (global_var);
  add_referenced_var (global_var);
  cfun->gimple_df->vop = global_var;
}

/* These are the sizes of the operand memory buffer in bytes which gets
   allocated each time more operand space is required.  The final value is
   the amount that is allocated every time after that.
   In 1k we can fit 25 use operands (or 63 def operands) on a host with
   8 byte pointers; that would be 10 statements each with 1 def and 2
   uses.  */

#define OP_SIZE_INIT    0
#define OP_SIZE_1       (1024 - sizeof (void *))
#define OP_SIZE_2       (1024 * 4 - sizeof (void *))
#define OP_SIZE_3       (1024 * 16 - sizeof (void *))
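
/* The arithmetic behind the comment above, assuming the usual 64-bit
   layouts: a struct use_optype_d is a next pointer plus a four-pointer
   ssa_use_operand_t, i.e. 40 bytes, and a struct def_optype_d is two
   pointers, i.e. 16 bytes, so the 1016 usable bytes of OP_SIZE_1 hold
   1016 / 40 = 25 uses or 1016 / 16 = 63 defs.  Exact counts are host
   dependent.  */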

/* Initialize the operand cache routines.  */

void
init_ssa_operands (void)
{
  if (!n_initialized++)
    {
      build_defs = VEC_alloc (tree, heap, 5);
      build_uses = VEC_alloc (tree, heap, 10);
      build_vuse = NULL_TREE;
      build_vdef = NULL_TREE;
      bitmap_obstack_initialize (&operands_bitmap_obstack);
    }

  gcc_assert (gimple_ssa_operands (cfun)->operand_memory == NULL);
  gimple_ssa_operands (cfun)->operand_memory_index
     = gimple_ssa_operands (cfun)->ssa_operand_mem_size;
  gimple_ssa_operands (cfun)->ops_active = true;
  memset (&clobber_stats, 0, sizeof (clobber_stats));
  gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_INIT;
  create_vop_var ();
}


/* Dispose of anything required by the operand routines.  */

void
fini_ssa_operands (void)
{
  struct ssa_operand_memory_d *ptr;

  if (!--n_initialized)
    {
      VEC_free (tree, heap, build_defs);
      VEC_free (tree, heap, build_uses);
      build_vdef = NULL_TREE;
      build_vuse = NULL_TREE;
    }

  gimple_ssa_operands (cfun)->free_defs = NULL;
  gimple_ssa_operands (cfun)->free_uses = NULL;

  while ((ptr = gimple_ssa_operands (cfun)->operand_memory) != NULL)
    {
      gimple_ssa_operands (cfun)->operand_memory
        = gimple_ssa_operands (cfun)->operand_memory->next;
      ggc_free (ptr);
    }

  gimple_ssa_operands (cfun)->ops_active = false;

  if (!n_initialized)
    bitmap_obstack_release (&operands_bitmap_obstack);

  cfun->gimple_df->vop = NULL_TREE;

  if (dump_file && (dump_flags & TDF_STATS))
    {
      fprintf (dump_file, "Original clobbered vars:           %d\n",
               clobber_stats.clobbered_vars);
      fprintf (dump_file, "Static write clobbers avoided:     %d\n",
               clobber_stats.static_write_clobbers_avoided);
      fprintf (dump_file, "Static read clobbers avoided:      %d\n",
               clobber_stats.static_read_clobbers_avoided);
      fprintf (dump_file, "Unescapable clobbers avoided:      %d\n",
               clobber_stats.unescapable_clobbers_avoided);
      fprintf (dump_file, "Original read-only clobbers:       %d\n",
               clobber_stats.readonly_clobbers);
      fprintf (dump_file, "Static read-only clobbers avoided: %d\n",
               clobber_stats.static_readonly_clobbers_avoided);
    }
}


/* Return memory for an operand of size SIZE.  */

static inline void *
ssa_operand_alloc (unsigned size)
{
  char *ptr;

  gcc_assert (size == sizeof (struct use_optype_d)
              || size == sizeof (struct def_optype_d));

  if (gimple_ssa_operands (cfun)->operand_memory_index + size
      >= gimple_ssa_operands (cfun)->ssa_operand_mem_size)
    {
      struct ssa_operand_memory_d *ptr;

      switch (gimple_ssa_operands (cfun)->ssa_operand_mem_size)
        {
        case OP_SIZE_INIT:
          gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_1;
          break;
        case OP_SIZE_1:
          gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_2;
          break;
        case OP_SIZE_2:
        case OP_SIZE_3:
          gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_3;
          break;
        default:
          gcc_unreachable ();
        }

      ptr = (struct ssa_operand_memory_d *)
              ggc_alloc (sizeof (void *)
                         + gimple_ssa_operands (cfun)->ssa_operand_mem_size);
      ptr->next = gimple_ssa_operands (cfun)->operand_memory;
      gimple_ssa_operands (cfun)->operand_memory = ptr;
      gimple_ssa_operands (cfun)->operand_memory_index = 0;
    }

  ptr = &(gimple_ssa_operands (cfun)->operand_memory
          ->mem[gimple_ssa_operands (cfun)->operand_memory_index]);
  gimple_ssa_operands (cfun)->operand_memory_index += size;
  return ptr;
}


/* Allocate a DEF operand.  */

static inline struct def_optype_d *
alloc_def (void)
{
  struct def_optype_d *ret;
  if (gimple_ssa_operands (cfun)->free_defs)
    {
      ret = gimple_ssa_operands (cfun)->free_defs;
      gimple_ssa_operands (cfun)->free_defs
        = gimple_ssa_operands (cfun)->free_defs->next;
    }
  else
    ret = (struct def_optype_d *)
          ssa_operand_alloc (sizeof (struct def_optype_d));
  return ret;
}


/* Allocate a USE operand.  */

static inline struct use_optype_d *
alloc_use (void)
{
  struct use_optype_d *ret;
  if (gimple_ssa_operands (cfun)->free_uses)
    {
      ret = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses
        = gimple_ssa_operands (cfun)->free_uses->next;
    }
  else
    ret = (struct use_optype_d *)
          ssa_operand_alloc (sizeof (struct use_optype_d));
  return ret;
}


/* Adds OP to the list of defs after LAST.  */

static inline def_optype_p
add_def_op (tree *op, def_optype_p last)
{
  def_optype_p new_def;

  new_def = alloc_def ();
  DEF_OP_PTR (new_def) = op;
  last->next = new_def;
  new_def->next = NULL;
  return new_def;
}


/* Adds OP to the list of uses of statement STMT after LAST.  */

static inline use_optype_p
add_use_op (gimple stmt, tree *op, use_optype_p last)
{
  use_optype_p new_use;

  new_use = alloc_use ();
  USE_OP_PTR (new_use)->use = op;
  link_imm_use_stmt (USE_OP_PTR (new_use), *op, stmt);
  last->next = new_use;
  new_use->next = NULL;
  return new_use;
}


/* Takes elements from build_defs and turns them into def operands of STMT.
   TODO -- Make build_defs VEC of tree *.  */

static inline void
finalize_ssa_defs (gimple stmt)
{
  unsigned new_i;
  struct def_optype_d new_list;
  def_optype_p old_ops, last;
  unsigned int num = VEC_length (tree, build_defs);

  /* There should only be a single real definition per assignment.  */
  gcc_assert ((stmt && gimple_code (stmt) != GIMPLE_ASSIGN) || num <= 1);

  /* Pre-pend the vdef we may have built.  */
  if (build_vdef != NULL_TREE)
    {
      tree oldvdef = gimple_vdef (stmt);
      if (oldvdef
          && TREE_CODE (oldvdef) == SSA_NAME)
        oldvdef = SSA_NAME_VAR (oldvdef);
      if (oldvdef != build_vdef)
        gimple_set_vdef (stmt, build_vdef);
      VEC_safe_insert (tree, heap, build_defs, 0, (tree)gimple_vdef_ptr (stmt));
      ++num;
    }

  new_list.next = NULL;
  last = &new_list;

  old_ops = gimple_def_ops (stmt);

  new_i = 0;

  /* Clear and unlink a no longer necessary VDEF.  */
  if (build_vdef == NULL_TREE
      && gimple_vdef (stmt) != NULL_TREE)
    {
      if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
        {
          unlink_stmt_vdef (stmt);
          release_ssa_name (gimple_vdef (stmt));
        }
      gimple_set_vdef (stmt, NULL_TREE);
    }

  /* If we have a non-SSA_NAME VDEF, mark it for renaming.  */
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) != SSA_NAME)
    mark_sym_for_renaming (gimple_vdef (stmt));

  /* Check for the common case of 1 def that hasn't changed.  */
  if (old_ops && old_ops->next == NULL && num == 1
      && (tree *) VEC_index (tree, build_defs, 0) == DEF_OP_PTR (old_ops))
    return;

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      old_ops->next = gimple_ssa_operands (cfun)->free_defs;
      gimple_ssa_operands (cfun)->free_defs = old_ops;
    }

  /* If there is anything remaining in the build_defs list, simply emit it.  */
  for ( ; new_i < num; new_i++)
    last = add_def_op ((tree *) VEC_index (tree, build_defs, new_i), last);

  /* Now set the stmt's operands.  */
  gimple_set_def_ops (stmt, new_list.next);
}


/* Takes elements from build_uses and turns them into use operands of STMT.
   TODO -- Make build_uses VEC of tree *.  */

static inline void
finalize_ssa_uses (gimple stmt)
{
  unsigned new_i;
  struct use_optype_d new_list;
  use_optype_p old_ops, ptr, last;

  /* Pre-pend the VUSE we may have built.  */
  if (build_vuse != NULL_TREE)
    {
      tree oldvuse = gimple_vuse (stmt);
      if (oldvuse
          && TREE_CODE (oldvuse) == SSA_NAME)
        oldvuse = SSA_NAME_VAR (oldvuse);
      if (oldvuse != (build_vuse != NULL_TREE
                      ? build_vuse : build_vdef))
        gimple_set_vuse (stmt, NULL_TREE);
      VEC_safe_insert (tree, heap, build_uses, 0, (tree)gimple_vuse_ptr (stmt));
    }

  new_list.next = NULL;
  last = &new_list;

  old_ops = gimple_use_ops (stmt);

  /* Clear a no longer necessary VUSE.  */
  if (build_vuse == NULL_TREE
      && gimple_vuse (stmt) != NULL_TREE)
    gimple_set_vuse (stmt, NULL_TREE);

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr; ptr = ptr->next)
        delink_imm_use (USE_OP_PTR (ptr));
      old_ops->next = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses = old_ops;
    }

  /* If we added a VUSE, make sure to set the operand if it is not already
     present and mark it for renaming.  */
  if (build_vuse != NULL_TREE
      && gimple_vuse (stmt) == NULL_TREE)
    {
      gimple_set_vuse (stmt, gimple_vop (cfun));
      mark_sym_for_renaming (gimple_vop (cfun));
    }

  /* Now create nodes for all the new uses.  */
  for (new_i = 0; new_i < VEC_length (tree, build_uses); new_i++)
    last = add_use_op (stmt,
                       (tree *) VEC_index (tree, build_uses, new_i),
                       last);

  /* Now set the stmt's operands.  */
  gimple_set_use_ops (stmt, new_list.next);
}


/* Clear the built VDEF and VUSE and empty the build arrays for defs
   and uses.  */

static inline void
cleanup_build_arrays (void)
{
  build_vdef = NULL_TREE;
  build_vuse = NULL_TREE;
  VEC_truncate (tree, build_defs, 0);
  VEC_truncate (tree, build_uses, 0);
}


/* Finalize all the build vectors and fill the new operands into STMT.  */

static inline void
finalize_ssa_stmt_operands (gimple stmt)
{
  finalize_ssa_defs (stmt);
  finalize_ssa_uses (stmt);
  cleanup_build_arrays ();
}


/* Start the process of building up operand vectors.  */

static inline void
start_ssa_stmt_operands (void)
{
  gcc_assert (VEC_length (tree, build_defs) == 0);
  gcc_assert (VEC_length (tree, build_uses) == 0);
  gcc_assert (build_vuse == NULL_TREE);
  gcc_assert (build_vdef == NULL_TREE);
}


/* Add DEF_P to the list of pointers to operands.  */

static inline void
append_def (tree *def_p)
{
  VEC_safe_push (tree, heap, build_defs, (tree) def_p);
}


/* Add USE_P to the list of pointers to operands.  */

static inline void
append_use (tree *use_p)
{
  VEC_safe_push (tree, heap, build_uses, (tree) use_p);
}


/* Add VAR to the set of variables that require a VDEF operator.  */

static inline void
append_vdef (tree var)
{
  if (!optimize)
    return;

  gcc_assert ((build_vdef == NULL_TREE
               || build_vdef == var)
              && (build_vuse == NULL_TREE
                  || build_vuse == var));

  build_vdef = var;
  build_vuse = var;
}


/* Add VAR to the set of variables that require a VUSE operator.  */

static inline void
append_vuse (tree var)
{
  if (!optimize)
    return;

  gcc_assert (build_vuse == NULL_TREE
              || build_vuse == var);

  build_vuse = var;
}

/* Add virtual operands for STMT.  FLAGS is as in get_expr_operands.  */

static void
add_virtual_operand (gimple stmt ATTRIBUTE_UNUSED, int flags)
{
  /* Add virtual operands to the stmt, unless the caller has specifically
     requested not to do that (used when adding operands inside an
     ADDR_EXPR expression).  */
  if (flags & opf_no_vops)
    return;

  gcc_assert (!is_gimple_debug (stmt));

  if (flags & opf_def)
    append_vdef (gimple_vop (cfun));
  else
    append_vuse (gimple_vop (cfun));
}


/* Add *VAR_P to the appropriate operand array for statement STMT.
   FLAGS is as in get_expr_operands.  If *VAR_P is a GIMPLE register,
   it will be added to the statement's real operands, otherwise it is
   added to virtual operands.  */

static void
add_stmt_operand (tree *var_p, gimple stmt, int flags)
{
  tree var, sym;

  gcc_assert (SSA_VAR_P (*var_p));

  var = *var_p;
  sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);

  /* Mark statements with volatile operands.  */
  if (TREE_THIS_VOLATILE (sym))
    gimple_set_has_volatile_ops (stmt, true);

  if (is_gimple_reg (sym))
    {
      /* The variable is a GIMPLE register.  Add it to real operands.  */
      if (flags & opf_def)
        append_def (var_p);
      else
        append_use (var_p);
    }
  else
    add_virtual_operand (stmt, flags);
}

/* Mark the base address of REF as having its address taken.
   REF may be a single variable whose address has been taken or any
   other valid GIMPLE memory reference (structure reference, array,
   etc).  */

static void
mark_address_taken (tree ref)
{
  tree var;

  /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
     as the only thing we take the address of.  If VAR is a structure,
     taking the address of a field means that the whole structure may
     be referenced using pointer arithmetic.  See PR 21407 and the
     ensuing mailing list discussion.  */
  var = get_base_address (ref);
  if (var && DECL_P (var))
    TREE_ADDRESSABLE (var) = 1;
}
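
/* For example, given

       struct { int f; int g; } s;
       int *p = &s.g;

   the base address of the COMPONENT_REF is S itself, so all of S is
   marked addressable: pointer arithmetic from &s.g can reach s.f,
   which is the PR 21407 scenario referenced above.  */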


/* A subroutine of get_expr_operands to handle INDIRECT_REF,
   ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.

   STMT is the statement being processed, EXPR is the INDIRECT_REF
      that got us here.

   FLAGS is as in get_expr_operands.

   RECURSE_ON_BASE should be set to true if we want to continue
      calling get_expr_operands on the base pointer, and false if
      something else will do it for us.  */

static void
get_indirect_ref_operands (gimple stmt, tree expr, int flags,
                           bool recurse_on_base)
{
  tree *pptr = &TREE_OPERAND (expr, 0);

  if (TREE_THIS_VOLATILE (expr))
    gimple_set_has_volatile_ops (stmt, true);

  /* Add the VOP.  */
  add_virtual_operand (stmt, flags);

  /* If requested, add a USE operand for the base pointer.  */
  if (recurse_on_base)
    get_expr_operands (stmt, pptr,
                       opf_use | (flags & opf_no_vops));
}
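
/* Schematically, for the LHS of "*p_1 = x_2" this adds a VDEF for the
   store (FLAGS contains opf_def) and, via the recursion on the base,
   a real USE of p_1; the RHS x_2 is picked up separately by the
   caller.  */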


/* A subroutine of get_expr_operands to handle TARGET_MEM_REF.  */

static void
get_tmr_operands (gimple stmt, tree expr, int flags)
{
  if (TREE_THIS_VOLATILE (expr))
    gimple_set_has_volatile_ops (stmt, true);

  /* First record the real operands.  */
  get_expr_operands (stmt, &TMR_BASE (expr), opf_use | (flags & opf_no_vops));
  get_expr_operands (stmt, &TMR_INDEX (expr), opf_use | (flags & opf_no_vops));

  if (TMR_SYMBOL (expr))
    mark_address_taken (TMR_SYMBOL (expr));

  add_virtual_operand (stmt, flags);
}


/* If STMT is a call that may clobber globals and other symbols that
   escape, add them to the VDEF/VUSE lists for it.  */

static void
maybe_add_call_vops (gimple stmt)
{
  int call_flags = gimple_call_flags (stmt);

  /* If aliases have been computed already, add VDEF or VUSE
     operands for all the symbols that have been found to be
     call-clobbered.  */
  if (!(call_flags & ECF_NOVOPS))
    {
      /* A 'pure' or a 'const' function never call-clobbers anything.
         A 'noreturn' function might, but since we don't return anyway
         there is no point in recording that.  */
      if (!(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
        add_virtual_operand (stmt, opf_def);
      else if (!(call_flags & ECF_CONST))
        add_virtual_operand (stmt, opf_use);
    }
}
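
/* Concretely: a 'const' call gets neither VDEF nor VUSE; a 'pure' (or
   non-const 'noreturn') call gets only a VUSE, since it may read but
   not modify global memory; any other call without ECF_NOVOPS gets a
   VDEF of .MEM, which also implies the matching VUSE via append_vdef.  */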


/* Scan operands in the ASM_EXPR statement STMT.  */

static void
get_asm_expr_operands (gimple stmt)
{
  size_t i, noutputs;
  const char **oconstraints;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  noutputs = gimple_asm_noutputs (stmt);
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  /* Gather all output operands.  */
  for (i = 0; i < gimple_asm_noutputs (stmt); i++)
    {
      tree link = gimple_asm_output_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
                               &allows_reg, &is_inout);

      /* This should have been split in gimplify_asm_expr.  */
      gcc_assert (!allows_reg || !is_inout);

      /* Memory operands are addressable.  Note that STMT needs the
         address of this operand.  */
      if (!allows_reg && allows_mem)
        {
          tree t = get_base_address (TREE_VALUE (link));
          if (t && DECL_P (t))
            mark_address_taken (t);
        }

      get_expr_operands (stmt, &TREE_VALUE (link), opf_def);
    }

  /* Gather all input operands.  */
  for (i = 0; i < gimple_asm_ninputs (stmt); i++)
    {
      tree link = gimple_asm_input_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
                              &allows_mem, &allows_reg);

      /* Memory operands are addressable.  Note that STMT needs the
         address of this operand.  */
      if (!allows_reg && allows_mem)
        {
          tree t = get_base_address (TREE_VALUE (link));
          if (t && DECL_P (t))
            mark_address_taken (t);
        }

      get_expr_operands (stmt, &TREE_VALUE (link), 0);
    }

  /* Clobber all memory and addressable symbols for asm ("" : : : "memory");  */
  for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
    {
      tree link = gimple_asm_clobber_op (stmt, i);
      if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
        {
          add_virtual_operand (stmt, opf_def);
          break;
        }
    }
}


/* Recursively scan the expression pointed to by EXPR_P in statement
   STMT.  FLAGS is a bitmask of the OPF_* constants modifying how to
   interpret the operands found.  */

static void
get_expr_operands (gimple stmt, tree *expr_p, int flags)
{
  enum tree_code code;
  enum tree_code_class codeclass;
  tree expr = *expr_p;
  int uflags = opf_use;

  if (expr == NULL)
    return;

  if (is_gimple_debug (stmt))
    uflags |= (flags & opf_no_vops);

  code = TREE_CODE (expr);
  codeclass = TREE_CODE_CLASS (code);

  switch (code)
    {
    case ADDR_EXPR:
      /* Taking the address of a variable does not represent a
         reference to it, but the fact that the statement takes its
         address will be of interest to some passes (e.g. alias
         resolution).  */
      if (!is_gimple_debug (stmt))
        mark_address_taken (TREE_OPERAND (expr, 0));

      /* If the address is invariant, there may be no interesting
         variable references inside.  */
      if (is_gimple_min_invariant (expr))
        return;

      /* Otherwise, there may be variables referenced inside but there
         should be no VUSEs created, since the referenced objects are
         not really accessed.  The only operands that we should find
         here are ARRAY_REF indices which will always be real operands
         (GIMPLE does not allow non-registers as array indices).  */
      flags |= opf_no_vops;
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case SSA_NAME:
      add_stmt_operand (expr_p, stmt, flags);
      return;

    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      add_stmt_operand (expr_p, stmt, flags);
      return;

    case DEBUG_EXPR_DECL:
      gcc_assert (gimple_debug_bind_p (stmt));
      return;

    case MISALIGNED_INDIRECT_REF:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
      /* fall through */

    case ALIGN_INDIRECT_REF:
    case INDIRECT_REF:
      get_indirect_ref_operands (stmt, expr, flags, true);
      return;

    case TARGET_MEM_REF:
      get_tmr_operands (stmt, expr, flags);
      return;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
        if (TREE_THIS_VOLATILE (expr))
          gimple_set_has_volatile_ops (stmt, true);

        get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);

        if (code == COMPONENT_REF)
          {
            if (TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
              gimple_set_has_volatile_ops (stmt, true);
            get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags);
          }
        else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
          {
            get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags);
            get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags);
            get_expr_operands (stmt, &TREE_OPERAND (expr, 3), uflags);
          }

        return;
      }

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
         and an rvalue reference to its second argument.  */
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case COND_EXPR:
    case VEC_COND_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), uflags);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags);
      return;

    case CONSTRUCTOR:
      {
        /* General aggregate CONSTRUCTORs have been decomposed, but they
           are still in use as the COMPLEX_EXPR equivalent for vectors.  */
        constructor_elt *ce;
        unsigned HOST_WIDE_INT idx;

        for (idx = 0;
             VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
             idx++)
          get_expr_operands (stmt, &ce->value, uflags);

        return;
      }

    case BIT_FIELD_REF:
      if (TREE_THIS_VOLATILE (expr))
        gimple_set_has_volatile_ops (stmt, true);
      /* FALLTHRU */

    case TRUTH_NOT_EXPR:
    case VIEW_CONVERT_EXPR:
    do_unary:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case COMPOUND_EXPR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    do_binary:
      {
        get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
        get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
        return;
      }

    case DOT_PROD_EXPR:
    case REALIGN_LOAD_EXPR:
      {
        get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
        get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
        get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
        return;
      }

    case FUNCTION_DECL:
    case LABEL_DECL:
    case CONST_DECL:
    case CASE_LABEL_EXPR:
      /* Expressions that make no memory references.  */
      return;

    default:
      if (codeclass == tcc_unary)
        goto do_unary;
      if (codeclass == tcc_binary || codeclass == tcc_comparison)
        goto do_binary;
      if (codeclass == tcc_constant || codeclass == tcc_type)
        return;
    }

  /* If we get here, something has gone wrong.  */
#ifdef ENABLE_CHECKING
  fprintf (stderr, "unhandled expression in get_expr_operands():\n");
  debug_tree (expr);
  fputs ("\n", stderr);
#endif
  gcc_unreachable ();
}


/* Parse STMT looking for operands.  When finished, the various
   build_* operand vectors will have potential operands in them.  */

static void
parse_ssa_operands (gimple stmt)
{
  enum gimple_code code = gimple_code (stmt);

  if (code == GIMPLE_ASM)
    get_asm_expr_operands (stmt);
  else if (is_gimple_debug (stmt))
    {
      if (gimple_debug_bind_p (stmt)
          && gimple_debug_bind_has_value_p (stmt))
        get_expr_operands (stmt, gimple_debug_bind_get_value_ptr (stmt),
                           opf_use | opf_no_vops);
    }
  else
    {
      size_t i, start = 0;

      if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL)
        {
          get_expr_operands (stmt, gimple_op_ptr (stmt, 0), opf_def);
          start = 1;
        }

      for (i = start; i < gimple_num_ops (stmt); i++)
        get_expr_operands (stmt, gimple_op_ptr (stmt, i), opf_use);

      /* Add call-clobbered operands, if needed.  */
      if (code == GIMPLE_CALL)
        maybe_add_call_vops (stmt);
    }
}


/* Create an operands cache for STMT.  */

static void
build_ssa_operands (gimple stmt)
{
  /* Initially assume that the statement has no volatile operands.  */
  gimple_set_has_volatile_ops (stmt, false);

  start_ssa_stmt_operands ();
  parse_ssa_operands (stmt);
  finalize_ssa_stmt_operands (stmt);
}


/* Releases the operands of STMT back to their freelists, and clears
   the stmt operand lists.  */

void
free_stmt_operands (gimple stmt)
{
  def_optype_p defs = gimple_def_ops (stmt), last_def;
  use_optype_p uses = gimple_use_ops (stmt), last_use;

  if (defs)
    {
      for (last_def = defs; last_def->next; last_def = last_def->next)
        continue;
      last_def->next = gimple_ssa_operands (cfun)->free_defs;
      gimple_ssa_operands (cfun)->free_defs = defs;
      gimple_set_def_ops (stmt, NULL);
    }

  if (uses)
    {
      for (last_use = uses; last_use->next; last_use = last_use->next)
        delink_imm_use (USE_OP_PTR (last_use));
      delink_imm_use (USE_OP_PTR (last_use));
      last_use->next = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses = uses;
      gimple_set_use_ops (stmt, NULL);
    }

  if (gimple_has_mem_ops (stmt))
    {
      gimple_set_vuse (stmt, NULL_TREE);
      gimple_set_vdef (stmt, NULL_TREE);
    }
}


/* Get the operands of statement STMT.  */

void
update_stmt_operands (gimple stmt)
{
  /* If update_stmt_operands is called before SSA is initialized, do
     nothing.  */
  if (!ssa_operands_active ())
    return;

  timevar_push (TV_TREE_OPS);

  gcc_assert (gimple_modified_p (stmt));
  build_ssa_operands (stmt);
  gimple_set_modified (stmt, false);

  timevar_pop (TV_TREE_OPS);
}


/* Swap operands EXP0 and EXP1 in statement STMT.  No attempt is made
   to test the validity of the swap operation.  */

void
swap_tree_operands (gimple stmt, tree *exp0, tree *exp1)
{
  tree op0, op1;
  op0 = *exp0;
  op1 = *exp1;

  /* If the operand cache is active, attempt to preserve the relative
     positions of these two operands in their respective immediate use
     lists.  */
  if (ssa_operands_active () && op0 != op1)
    {
      use_optype_p use0, use1, ptr;
      use0 = use1 = NULL;

      /* Find the 2 operands in the cache, if they are there.  */
      for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next)
        if (USE_OP_PTR (ptr)->use == exp0)
          {
            use0 = ptr;
            break;
          }

      for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next)
        if (USE_OP_PTR (ptr)->use == exp1)
          {
            use1 = ptr;
            break;
          }

      /* If we could not locate both uses in the cache, there isn't much
         we can do at this point.  Presumably we don't need to worry
         about it.  */
      if (use0 && use1)
        {
          tree *tmp = USE_OP_PTR (use1)->use;
          USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
          USE_OP_PTR (use0)->use = tmp;
        }
    }

  /* Now swap the data.  */
  *exp0 = op1;
  *exp1 = op0;
}
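
/* A typical (schematic) use, canonicalizing a commutative operation:

       if (tree_swap_operands_p (gimple_assign_rhs1 (stmt),
                                 gimple_assign_rhs2 (stmt), false))
         swap_tree_operands (stmt,
                             gimple_assign_rhs1_ptr (stmt),
                             gimple_assign_rhs2_ptr (stmt));

   The bookkeeping above keeps each operand's position in its SSA
   name's immediate-use list intact across the swap.  */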


/* Scan the immediate_use list for VAR, making sure it's linked properly.
   Return true if there is a problem, emitting an error message to F.  */

bool
verify_imm_links (FILE *f, tree var)
{
  use_operand_p ptr, prev, list;
  int count;

  gcc_assert (TREE_CODE (var) == SSA_NAME);

  list = &(SSA_NAME_IMM_USE_NODE (var));
  gcc_assert (list->use == NULL);

  if (list->prev == NULL)
    {
      gcc_assert (list->next == NULL);
      return false;
    }

  prev = list;
  count = 0;
  for (ptr = list->next; ptr != list; )
    {
      if (prev != ptr->prev)
        goto error;

      if (ptr->use == NULL)
        goto error; /* 2 roots, or SAFE guard node.  */
      else if (*(ptr->use) != var)
        goto error;

      prev = ptr;
      ptr = ptr->next;

      /* Avoid infinite loops.  50,000,000 uses probably indicates a
         problem.  */
      if (count++ > 50000000)
        goto error;
    }

  /* Verify list in the other direction.  */
  prev = list;
  for (ptr = list->prev; ptr != list; )
    {
      if (prev != ptr->next)
        goto error;
      prev = ptr;
      ptr = ptr->prev;
      if (count-- < 0)
        goto error;
    }

  if (count != 0)
    goto error;

  return false;

 error:
  if (ptr->loc.stmt && gimple_modified_p (ptr->loc.stmt))
    {
      fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->loc.stmt);
      print_gimple_stmt (f, ptr->loc.stmt, 0, TDF_SLIM);
    }
  fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
           (void *)ptr->use);
  print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
  fprintf (f, "\n");
  return true;
}
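
/* The invariant being checked, schematically: each imm-use list is a
   circular, doubly linked list rooted in the SSA name itself,

       root (use == NULL) <-> use_1 <-> use_2 <-> ... <-> root

   where every non-root node's USE pointer points at an occurrence of
   VAR, and a NULL use field is what distinguishes the root node.  */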


/* Dump all the immediate uses of VAR to FILE.  */

void
dump_immediate_uses_for (FILE *file, tree var)
{
  imm_use_iterator iter;
  use_operand_p use_p;

  gcc_assert (var && TREE_CODE (var) == SSA_NAME);

  print_generic_expr (file, var, TDF_SLIM);
  fprintf (file, " : -->");
  if (has_zero_uses (var))
    fprintf (file, " no uses.\n");
  else
    if (has_single_use (var))
      fprintf (file, " single use.\n");
    else
      fprintf (file, "%d uses.\n", num_imm_uses (var));

  FOR_EACH_IMM_USE_FAST (use_p, iter, var)
    {
      if (use_p->loc.stmt == NULL && use_p->use == NULL)
        fprintf (file, "***end of stmt iterator marker***\n");
      else
        if (!is_gimple_reg (USE_FROM_PTR (use_p)))
          print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_VOPS|TDF_MEMSYMS);
        else
          print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_SLIM);
    }
  fprintf (file, "\n");
}


/* Dump all the immediate uses to FILE.  */

void
dump_immediate_uses (FILE *file)
{
  tree var;
  unsigned int x;

  fprintf (file, "Immediate_uses: \n\n");
  for (x = 1; x < num_ssa_names; x++)
    {
      var = ssa_name (x);
      if (!var)
        continue;
      dump_immediate_uses_for (file, var);
    }
}


/* Dump def-use edges on stderr.  */

void
debug_immediate_uses (void)
{
  dump_immediate_uses (stderr);
}


/* Dump def-use edges for VAR on stderr.  */

void
debug_immediate_uses_for (tree var)
{
  dump_immediate_uses_for (stderr, var);
}


/* Unlink STMT's virtual definition from the IL by propagating its VUSE
   to the uses of its VDEF.  */
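
/* For instance, when the dead store in

       # .MEM_3 = VDEF <.MEM_2>
       *p_1 = x_4;

   is removed, every downstream use of .MEM_3 is rewritten to use
   .MEM_2, keeping the virtual use-def chain connected.  */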

void
unlink_stmt_vdef (gimple stmt)
{
  use_operand_p use_p;
  imm_use_iterator iter;
  gimple use_stmt;
  tree vdef = gimple_vdef (stmt);

  if (!vdef
      || TREE_CODE (vdef) != SSA_NAME)
    return;

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, gimple_vdef (stmt))
    {
      FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
        SET_USE (use_p, gimple_vuse (stmt));
    }

  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_vdef (stmt)))
    SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_vuse (stmt)) = 1;
}
