/* SSA operands management for trees.
   Copyright (C) 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "ggc.h"
#include "timevar.h"
#include "toplev.h"
#include "langhooks.h"
#include "ipa-reference.h"

/* This file contains the code required to manage the operands cache of the
   SSA optimizer.  For every stmt, we maintain an operand cache in the stmt
   annotation.  This cache contains operands that will be of interest to
   optimizers and other passes wishing to manipulate the IL.

   The operand types are broken up into REAL and VIRTUAL operands.  The real
   operands are represented as pointers into the stmt's operand tree.  Thus
   any manipulation of the real operands will be reflected in the actual tree.
   Virtual operands are represented solely in the cache, although the base
   variable for the SSA_NAME may or may not occur in the stmt's tree.
   Manipulation of the virtual operands will not be reflected in the stmt tree.

   The routines in this file are concerned with creating this operand cache
   from a stmt tree.

   The operand tree is then parsed by the various get_* routines, which look
   through the stmt tree for the occurrence of operands which may be of
   interest, and calls are made to the append_* routines whenever one is
   found.  There are 5 of these routines, each representing one of the
   5 types of operands: Defs, Uses, Virtual Uses, Virtual May Defs, and
   Virtual Must Defs.

   The append_* routines check for duplication, and simply keep a list of
   unique objects for each operand type in the build_* extendable vectors.

   Once the stmt tree is completely parsed, the finalize_ssa_operands()
   routine is called, which proceeds to perform the finalization routine
   on each of the 5 operand vectors which have been built up.

   If the stmt had a previous operand cache, the finalization routines
   attempt to match up the new operands with the old ones.  If it's a perfect
   match, the old vector is simply reused.  If it isn't a perfect match, then
   a new vector is created and the new operands are placed there.  For
   virtual operands, if the previous cache had an SSA_NAME version of a
   variable, and that same variable occurs in the same operands cache, then
   the new cache vector will also get the same SSA_NAME.

   i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand
   vector for VUSE, then the new vector will also be modified such that
   it contains 'a_5' rather than 'a'.  */
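
/* As a worked example (illustrative only, not part of the original
   sources): for an aliased global 'g' and the statement

       x_3 = g + y_7;

   the cache would hold a real DEF for 'x_3' and a real USE for 'y_7'
   (both pointers into the statement tree), plus a VUSE for the SSA
   name of 'g', which lives only in the cache since the load of 'g' is
   a virtual operand.  */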

/* Flags to describe operand properties in helpers.  */

/* By default, operands are loaded.  */
#define opf_none        0

/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
#define opf_is_def      (1 << 0)

/* Operand is the target of an assignment expression.  */
#define opf_kill_def    (1 << 1)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops     (1 << 2)

/* Operand is a "non-specific" kill for call-clobbers and such.  This
   is used to distinguish "reset the world" events from explicit
   MODIFY_EXPRs.  */
#define opf_non_specific  (1 << 3)
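
/* An illustrative note (not from the original sources) on how these
   flags combine: the left-hand side of a plain assignment 'a = b' is
   scanned with (opf_is_def | opf_kill_def); a store through a pointer
   '*p = b' keeps opf_is_def but clears opf_kill_def (see
   get_indirect_ref_operands below); and subexpressions of an
   ADDR_EXPR are scanned with opf_no_vops.  */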

/* Array for building all the def operands.  */
static VEC(tree,heap) *build_defs;

/* Array for building all the use operands.  */
static VEC(tree,heap) *build_uses;

/* Array for building all the V_MAY_DEF operands.  */
static VEC(tree,heap) *build_v_may_defs;

/* Array for building all the VUSE operands.  */
static VEC(tree,heap) *build_vuses;

/* Array for building all the V_MUST_DEF operands.  */
static VEC(tree,heap) *build_v_must_defs;

/* True if the SSA operands cache is active.  */
static bool ops_active = false;

static GTY (()) struct ssa_operand_memory_d *operand_memory = NULL;
static unsigned operand_memory_index;

static void get_expr_operands (tree, tree *, int);

static def_optype_p free_defs = NULL;
static use_optype_p free_uses = NULL;
static vuse_optype_p free_vuses = NULL;
static maydef_optype_p free_maydefs = NULL;
static mustdef_optype_p free_mustdefs = NULL;

/* Allocate operand OP of the given TYPE from the appropriate free list,
   or from newly allocated operand memory if the free list is empty.  */

#define ALLOC_OPTYPE(OP, TYPE)                          \
  do                                                    \
    {                                                   \
      TYPE##_optype_p ret = free_##TYPE##s;             \
      if (ret)                                          \
        free_##TYPE##s = ret->next;                     \
      else                                              \
        ret = ssa_operand_alloc (sizeof (*ret));        \
      (OP) = ret;                                       \
    } while (0)
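
/* For instance (an illustrative expansion, not part of the original
   sources), ALLOC_OPTYPE (op, def) expands to roughly:

       def_optype_p ret = free_defs;
       if (ret)
         free_defs = ret->next;
       else
         ret = ssa_operand_alloc (sizeof (*ret));
       (op) = ret;

   so nodes are recycled from the free_defs chain before new memory is
   carved out of the GC-managed operand_memory blocks.  */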

/* Return the DECL_UID of the base variable of T.  */

static inline unsigned
get_name_decl (tree t)
{
  if (TREE_CODE (t) != SSA_NAME)
    return DECL_UID (t);
  else
    return DECL_UID (SSA_NAME_VAR (t));
}


/* Comparison function for qsort used in operand_build_sort_virtual.  */

static int
operand_build_cmp (const void *p, const void *q)
{
  tree e1 = *((const tree *) p);
  tree e2 = *((const tree *) q);
  unsigned int u1, u2;

  u1 = get_name_decl (e1);
  u2 = get_name_decl (e2);

  /* We want to sort in ascending order.  They can never be equal.  */
#ifdef ENABLE_CHECKING
  gcc_assert (u1 != u2);
#endif
  return (u1 > u2 ? 1 : -1);
}


/* Sort the virtual operands in LIST from lowest DECL_UID to highest.  */

static inline void
operand_build_sort_virtual (VEC(tree,heap) *list)
{
  int num = VEC_length (tree, list);

  if (num < 2)
    return;

  if (num == 2)
    {
      if (get_name_decl (VEC_index (tree, list, 0))
          > get_name_decl (VEC_index (tree, list, 1)))
        {
          /* Swap elements if in the wrong order.  */
          tree tmp = VEC_index (tree, list, 0);
          VEC_replace (tree, list, 0, VEC_index (tree, list, 1));
          VEC_replace (tree, list, 1, tmp);
        }
      return;
    }

  /* There are 3 or more elements, call qsort.  */
  qsort (VEC_address (tree, list),
         VEC_length (tree, list),
         sizeof (tree),
         operand_build_cmp);
}


/* Return true if the SSA operands cache is active.  */

bool
ssa_operands_active (void)
{
  return ops_active;
}


/* Structure storing statistics on how many call clobbers we have, and
   how many were avoided.  */

static struct
{
  /* Number of call-clobbered ops we attempt to add to calls in
     add_call_clobber_ops.  */
  unsigned int clobbered_vars;

  /* Number of write-clobbers (V_MAY_DEFs) avoided by using
     not_written information.  */
  unsigned int static_write_clobbers_avoided;

  /* Number of reads (VUSEs) avoided by using not_read information.  */
  unsigned int static_read_clobbers_avoided;

  /* Number of write-clobbers avoided because the variable can't escape to
     this call.  */
  unsigned int unescapable_clobbers_avoided;

  /* Number of read-only uses we attempt to add to calls in
     add_call_read_ops.  */
  unsigned int readonly_clobbers;

  /* Number of read-only uses we avoid using not_read information.  */
  unsigned int static_readonly_clobbers_avoided;
} clobber_stats;


/* Initialize the operand cache routines.  */

void
init_ssa_operands (void)
{
  build_defs = VEC_alloc (tree, heap, 5);
  build_uses = VEC_alloc (tree, heap, 10);
  build_vuses = VEC_alloc (tree, heap, 25);
  build_v_may_defs = VEC_alloc (tree, heap, 25);
  build_v_must_defs = VEC_alloc (tree, heap, 25);

  gcc_assert (operand_memory == NULL);
  operand_memory_index = SSA_OPERAND_MEMORY_SIZE;
  ops_active = true;
  memset (&clobber_stats, 0, sizeof (clobber_stats));
}


/* Dispose of anything required by the operand routines.  */

void
fini_ssa_operands (void)
{
  struct ssa_operand_memory_d *ptr;
  VEC_free (tree, heap, build_defs);
  VEC_free (tree, heap, build_uses);
  VEC_free (tree, heap, build_v_must_defs);
  VEC_free (tree, heap, build_v_may_defs);
  VEC_free (tree, heap, build_vuses);
  free_defs = NULL;
  free_uses = NULL;
  free_vuses = NULL;
  free_maydefs = NULL;
  free_mustdefs = NULL;
  while ((ptr = operand_memory) != NULL)
    {
      operand_memory = operand_memory->next;
      ggc_free (ptr);
    }

  ops_active = false;

  if (dump_file && (dump_flags & TDF_STATS))
    {
      fprintf (dump_file, "Original clobbered vars:%d\n",
               clobber_stats.clobbered_vars);
      fprintf (dump_file, "Static write clobbers avoided:%d\n",
               clobber_stats.static_write_clobbers_avoided);
      fprintf (dump_file, "Static read clobbers avoided:%d\n",
               clobber_stats.static_read_clobbers_avoided);
      fprintf (dump_file, "Unescapable clobbers avoided:%d\n",
               clobber_stats.unescapable_clobbers_avoided);
      fprintf (dump_file, "Original read-only clobbers:%d\n",
               clobber_stats.readonly_clobbers);
      fprintf (dump_file, "Static read-only clobbers avoided:%d\n",
               clobber_stats.static_readonly_clobbers_avoided);
    }
}


/* Return a pointer to a chunk of SIZE bytes of operand memory.  */

static inline void *
ssa_operand_alloc (unsigned size)
{
  char *ptr;
  if (operand_memory_index + size >= SSA_OPERAND_MEMORY_SIZE)
    {
      struct ssa_operand_memory_d *ptr;
      ptr = GGC_NEW (struct ssa_operand_memory_d);
      ptr->next = operand_memory;
      operand_memory = ptr;
      operand_memory_index = 0;
    }
  ptr = &(operand_memory->mem[operand_memory_index]);
  operand_memory_index += size;
  return ptr;
}




/* This routine makes sure that PTR is in an immediate use list, and makes
   sure the stmt pointer is set to the current stmt.  */

static inline void
set_virtual_use_link (use_operand_p ptr, tree stmt)
{
  /* fold_stmt may have changed the stmt pointers.  */
  if (ptr->stmt != stmt)
    ptr->stmt = stmt;

  /* If this use isn't in a list, add it to the correct list.  */
  if (!ptr->prev)
    link_imm_use (ptr, *(ptr->use));
}

/* Appends ELT after TO, and moves the TO pointer to ELT.  */

#define APPEND_OP_AFTER(ELT, TO)        \
  do                                    \
    {                                   \
      (TO)->next = (ELT);               \
      (TO) = (ELT);                     \
    } while (0)

/* Appends head of list FROM after TO, and moves both pointers
   to their successors.  */

#define MOVE_HEAD_AFTER(FROM, TO)       \
  do                                    \
    {                                   \
      APPEND_OP_AFTER (FROM, TO);       \
      (FROM) = (FROM)->next;            \
    } while (0)

/* Moves OP to the appropriate freelist.  OP is set to its successor.  */

#define MOVE_HEAD_TO_FREELIST(OP, TYPE)                 \
  do                                                    \
    {                                                   \
      TYPE##_optype_p next = (OP)->next;                \
      (OP)->next = free_##TYPE##s;                      \
      free_##TYPE##s = (OP);                            \
      (OP) = next;                                      \
    } while (0)

/* Initializes immediate use at USE_PTR to value VAL, and links it to the list
   of immediate uses.  STMT is the current statement.  */

#define INITIALIZE_USE(USE_PTR, VAL, STMT)              \
  do                                                    \
    {                                                   \
      (USE_PTR)->use = (VAL);                           \
      link_imm_use_stmt ((USE_PTR), *(VAL), (STMT));    \
    } while (0)
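
/* An illustrative expansion (not part of the original sources):
   MOVE_HEAD_TO_FREELIST (old_ops, def) pops the head node off the
   old_ops chain and pushes it onto the free_defs list:

       def_optype_p next = (old_ops)->next;
       (old_ops)->next = free_defs;
       free_defs = (old_ops);
       (old_ops) = next;  */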

/* Adds OP to the list of defs after LAST, and moves
   LAST to the new element.  */

static inline void
add_def_op (tree *op, def_optype_p *last)
{
  def_optype_p new;

  ALLOC_OPTYPE (new, def);
  DEF_OP_PTR (new) = op;
  APPEND_OP_AFTER (new, *last);
}

/* Adds OP to the list of uses of statement STMT after LAST, and moves
   LAST to the new element.  */

static inline void
add_use_op (tree stmt, tree *op, use_optype_p *last)
{
  use_optype_p new;

  ALLOC_OPTYPE (new, use);
  INITIALIZE_USE (USE_OP_PTR (new), op, stmt);
  APPEND_OP_AFTER (new, *last);
}

/* Adds OP to the list of vuses of statement STMT after LAST, and moves
   LAST to the new element.  */

static inline void
add_vuse_op (tree stmt, tree op, vuse_optype_p *last)
{
  vuse_optype_p new;

  ALLOC_OPTYPE (new, vuse);
  VUSE_OP (new) = op;
  INITIALIZE_USE (VUSE_OP_PTR (new), &VUSE_OP (new), stmt);
  APPEND_OP_AFTER (new, *last);
}

/* Adds OP to the list of maydefs of statement STMT after LAST, and moves
   LAST to the new element.  */

static inline void
add_maydef_op (tree stmt, tree op, maydef_optype_p *last)
{
  maydef_optype_p new;

  ALLOC_OPTYPE (new, maydef);
  MAYDEF_RESULT (new) = op;
  MAYDEF_OP (new) = op;
  INITIALIZE_USE (MAYDEF_OP_PTR (new), &MAYDEF_OP (new), stmt);
  APPEND_OP_AFTER (new, *last);
}

/* Adds OP to the list of mustdefs of statement STMT after LAST, and moves
   LAST to the new element.  */

static inline void
add_mustdef_op (tree stmt, tree op, mustdef_optype_p *last)
{
  mustdef_optype_p new;

  ALLOC_OPTYPE (new, mustdef);
  MUSTDEF_RESULT (new) = op;
  MUSTDEF_KILL (new) = op;
  INITIALIZE_USE (MUSTDEF_KILL_PTR (new), &MUSTDEF_KILL (new), stmt);
  APPEND_OP_AFTER (new, *last);
}

/* Takes elements from build_defs and turns them into def operands of STMT.
   TODO -- Given that the def operands list is not necessarily sorted, merging
           the operands this way does not make much sense.
        -- Make build_defs VEC of tree *.  */

static inline void
finalize_ssa_def_ops (tree stmt)
{
  unsigned new_i;
  struct def_optype_d new_list;
  def_optype_p old_ops, last;
  tree *old_base;

  new_list.next = NULL;
  last = &new_list;

  old_ops = DEF_OPS (stmt);

  new_i = 0;
  while (old_ops && new_i < VEC_length (tree, build_defs))
    {
      tree *new_base = (tree *) VEC_index (tree, build_defs, new_i);
      old_base = DEF_OP_PTR (old_ops);

      if (old_base == new_base)
        {
          /* If variables are the same, reuse this node.  */
          MOVE_HEAD_AFTER (old_ops, last);
          new_i++;
        }
      else if (old_base < new_base)
        {
          /* If old is less than new, old goes to the free list.  */
          MOVE_HEAD_TO_FREELIST (old_ops, def);
        }
      else
        {
          /* This is a new operand.  */
          add_def_op (new_base, &last);
          new_i++;
        }
    }

  /* If there is anything remaining in the build_defs list, simply emit it.  */
  for ( ; new_i < VEC_length (tree, build_defs); new_i++)
    add_def_op ((tree *) VEC_index (tree, build_defs, new_i), &last);

  last->next = NULL;

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      old_ops->next = free_defs;
      free_defs = old_ops;
    }

  /* Now set the stmt's operands.  */
  DEF_OPS (stmt) = new_list.next;

#ifdef ENABLE_CHECKING
  {
    def_optype_p ptr;
    unsigned x = 0;
    for (ptr = DEF_OPS (stmt); ptr; ptr = ptr->next)
      x++;

    gcc_assert (x == VEC_length (tree, build_defs));
  }
#endif
}
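
/* An illustrative walk of the merge loop above (not part of the
   original sources): if the old cache holds def pointers { P1, P3 }
   and build_defs holds { P1, P2, P3 } with P1 < P2 < P3, the node for
   P1 is reused, a fresh node is allocated for P2, and the node for P3
   is reused, so nothing lands on the free list.  */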

/* This routine will create stmt operands for STMT from the def build list.  */

static void
finalize_ssa_defs (tree stmt)
{
  unsigned int num = VEC_length (tree, build_defs);

  /* There should only be a single real definition per assignment.  */
  gcc_assert ((stmt && TREE_CODE (stmt) != MODIFY_EXPR) || num <= 1);

  /* If there is an old list, often the new list is identical, or close, so
     find the elements at the beginning that are the same as the vector.  */
  finalize_ssa_def_ops (stmt);
  VEC_truncate (tree, build_defs, 0);
}

/* Takes elements from build_uses and turns them into use operands of STMT.
   TODO -- Make build_uses VEC of tree *.  */

static inline void
finalize_ssa_use_ops (tree stmt)
{
  unsigned new_i;
  struct use_optype_d new_list;
  use_optype_p old_ops, ptr, last;

  new_list.next = NULL;
  last = &new_list;

  old_ops = USE_OPS (stmt);

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr; ptr = ptr->next)
        delink_imm_use (USE_OP_PTR (ptr));
      old_ops->next = free_uses;
      free_uses = old_ops;
    }

  /* Now create nodes for all the new uses.  */
  for (new_i = 0; new_i < VEC_length (tree, build_uses); new_i++)
    add_use_op (stmt, (tree *) VEC_index (tree, build_uses, new_i), &last);

  last->next = NULL;

  /* Now set the stmt's operands.  */
  USE_OPS (stmt) = new_list.next;

#ifdef ENABLE_CHECKING
  {
    unsigned x = 0;
    for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
      x++;

    gcc_assert (x == VEC_length (tree, build_uses));
  }
#endif
}

/* Create a new use operand vector for STMT from the build_uses vector.  */

static void
finalize_ssa_uses (tree stmt)
{
#ifdef ENABLE_CHECKING
  {
    unsigned x;
    unsigned num = VEC_length (tree, build_uses);

    /* If the pointer to the operand is the statement itself, something is
       wrong.  It means that we are pointing to a local variable (the
       initial call to update_stmt_operands does not pass a pointer to a
       statement).  */
    for (x = 0; x < num; x++)
      gcc_assert (*((tree *) VEC_index (tree, build_uses, x)) != stmt);
  }
#endif
  finalize_ssa_use_ops (stmt);
  VEC_truncate (tree, build_uses, 0);
}


/* Takes elements from build_v_may_defs and turns them into maydef operands of
   STMT.  */

static inline void
finalize_ssa_v_may_def_ops (tree stmt)
{
  unsigned new_i;
  struct maydef_optype_d new_list;
  maydef_optype_p old_ops, ptr, last;
  tree act;
  unsigned old_base, new_base;

  new_list.next = NULL;
  last = &new_list;

  old_ops = MAYDEF_OPS (stmt);

  new_i = 0;
  while (old_ops && new_i < VEC_length (tree, build_v_may_defs))
    {
      act = VEC_index (tree, build_v_may_defs, new_i);
      new_base = get_name_decl (act);
      old_base = get_name_decl (MAYDEF_OP (old_ops));

      if (old_base == new_base)
        {
          /* If variables are the same, reuse this node.  */
          MOVE_HEAD_AFTER (old_ops, last);
          set_virtual_use_link (MAYDEF_OP_PTR (last), stmt);
          new_i++;
        }
      else if (old_base < new_base)
        {
          /* If old is less than new, old goes to the free list.  */
          delink_imm_use (MAYDEF_OP_PTR (old_ops));
          MOVE_HEAD_TO_FREELIST (old_ops, maydef);
        }
      else
        {
          /* This is a new operand.  */
          add_maydef_op (stmt, act, &last);
          new_i++;
        }
    }

  /* If there is anything remaining in the build_v_may_defs list, simply
     emit it.  */
  for ( ; new_i < VEC_length (tree, build_v_may_defs); new_i++)
    add_maydef_op (stmt, VEC_index (tree, build_v_may_defs, new_i), &last);

  last->next = NULL;

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr; ptr = ptr->next)
        delink_imm_use (MAYDEF_OP_PTR (ptr));
      old_ops->next = free_maydefs;
      free_maydefs = old_ops;
    }

  /* Now set the stmt's operands.  */
  MAYDEF_OPS (stmt) = new_list.next;

#ifdef ENABLE_CHECKING
  {
    unsigned x = 0;
    for (ptr = MAYDEF_OPS (stmt); ptr; ptr = ptr->next)
      x++;

    gcc_assert (x == VEC_length (tree, build_v_may_defs));
  }
#endif
}

static void
finalize_ssa_v_may_defs (tree stmt)
{
  finalize_ssa_v_may_def_ops (stmt);
}


/* Clear the in_list bits and empty the build array for V_MAY_DEFs.  */

static inline void
cleanup_v_may_defs (void)
{
  unsigned x, num;
  num = VEC_length (tree, build_v_may_defs);

  for (x = 0; x < num; x++)
    {
      tree t = VEC_index (tree, build_v_may_defs, x);
      if (TREE_CODE (t) != SSA_NAME)
        {
          var_ann_t ann = var_ann (t);
          ann->in_v_may_def_list = 0;
        }
    }
  VEC_truncate (tree, build_v_may_defs, 0);
}


/* Takes elements from build_vuses and turns them into vuse operands of
   STMT.  */

static inline void
finalize_ssa_vuse_ops (tree stmt)
{
  unsigned new_i;
  struct vuse_optype_d new_list;
  vuse_optype_p old_ops, ptr, last;
  tree act;
  unsigned old_base, new_base;

  new_list.next = NULL;
  last = &new_list;

  old_ops = VUSE_OPS (stmt);

  new_i = 0;
  while (old_ops && new_i < VEC_length (tree, build_vuses))
    {
      act = VEC_index (tree, build_vuses, new_i);
      new_base = get_name_decl (act);
      old_base = get_name_decl (VUSE_OP (old_ops));

      if (old_base == new_base)
        {
          /* If variables are the same, reuse this node.  */
          MOVE_HEAD_AFTER (old_ops, last);
          set_virtual_use_link (VUSE_OP_PTR (last), stmt);
          new_i++;
        }
      else if (old_base < new_base)
        {
          /* If old is less than new, old goes to the free list.  */
          delink_imm_use (USE_OP_PTR (old_ops));
          MOVE_HEAD_TO_FREELIST (old_ops, vuse);
        }
      else
        {
          /* This is a new operand.  */
          add_vuse_op (stmt, act, &last);
          new_i++;
        }
    }

  /* If there is anything remaining in the build_vuses list, simply emit it.  */
  for ( ; new_i < VEC_length (tree, build_vuses); new_i++)
    add_vuse_op (stmt, VEC_index (tree, build_vuses, new_i), &last);

  last->next = NULL;

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr; ptr = ptr->next)
        delink_imm_use (VUSE_OP_PTR (ptr));
      old_ops->next = free_vuses;
      free_vuses = old_ops;
    }

  /* Now set the stmt's operands.  */
  VUSE_OPS (stmt) = new_list.next;

#ifdef ENABLE_CHECKING
  {
    unsigned x = 0;
    for (ptr = VUSE_OPS (stmt); ptr; ptr = ptr->next)
      x++;

    gcc_assert (x == VEC_length (tree, build_vuses));
  }
#endif
}

/* Create a new VUSE operand vector for STMT from the build_vuses vector.  */

static void
finalize_ssa_vuses (tree stmt)
{
  unsigned num, num_v_may_defs;
  unsigned vuse_index;

  /* Remove superfluous VUSE operands.  If the statement already has a
     V_MAY_DEF operation for a variable 'a', then a VUSE for 'a' is
     not needed because V_MAY_DEFs imply a VUSE of the variable.  For
     instance, suppose that variable 'a' is aliased:

              # VUSE <a_2>
              # a_3 = V_MAY_DEF <a_2>
              a = a + 1;

     The VUSE <a_2> is superfluous because it is implied by the
     V_MAY_DEF operation.  */
  num = VEC_length (tree, build_vuses);
  num_v_may_defs = VEC_length (tree, build_v_may_defs);

  if (num > 0 && num_v_may_defs > 0)
    {
      for (vuse_index = 0; vuse_index < VEC_length (tree, build_vuses); )
        {
          tree vuse;
          vuse = VEC_index (tree, build_vuses, vuse_index);
          if (TREE_CODE (vuse) != SSA_NAME)
            {
              var_ann_t ann = var_ann (vuse);
              ann->in_vuse_list = 0;
              if (ann->in_v_may_def_list)
                {
                  VEC_ordered_remove (tree, build_vuses, vuse_index);
                  continue;
                }
            }
          vuse_index++;
        }
    }
  else
    {
      /* Clear out the in_list bits.  */
      for (vuse_index = 0;
          vuse_index < VEC_length (tree, build_vuses);
          vuse_index++)
        {
          tree t = VEC_index (tree, build_vuses, vuse_index);
          if (TREE_CODE (t) != SSA_NAME)
            {
              var_ann_t ann = var_ann (t);
              ann->in_vuse_list = 0;
            }
        }
    }

  finalize_ssa_vuse_ops (stmt);

  /* The V_MAY_DEF build vector wasn't cleaned up because we needed it.  */
  cleanup_v_may_defs ();

  /* Free the VUSEs build vector.  */
  VEC_truncate (tree, build_vuses, 0);

}

/* Takes elements from build_v_must_defs and turns them into mustdef operands
   of STMT.  */

static inline void
finalize_ssa_v_must_def_ops (tree stmt)
{
  unsigned new_i;
  struct mustdef_optype_d new_list;
  mustdef_optype_p old_ops, ptr, last;
  tree act;
  unsigned old_base, new_base;

  new_list.next = NULL;
  last = &new_list;

  old_ops = MUSTDEF_OPS (stmt);

  new_i = 0;
  while (old_ops && new_i < VEC_length (tree, build_v_must_defs))
    {
      act = VEC_index (tree, build_v_must_defs, new_i);
      new_base = get_name_decl (act);
      old_base = get_name_decl (MUSTDEF_KILL (old_ops));

      if (old_base == new_base)
        {
          /* If variables are the same, reuse this node.  */
          MOVE_HEAD_AFTER (old_ops, last);
          set_virtual_use_link (MUSTDEF_KILL_PTR (last), stmt);
          new_i++;
        }
      else if (old_base < new_base)
        {
          /* If old is less than new, old goes to the free list.  */
          delink_imm_use (MUSTDEF_KILL_PTR (old_ops));
          MOVE_HEAD_TO_FREELIST (old_ops, mustdef);
        }
      else
        {
          /* This is a new operand.  */
          add_mustdef_op (stmt, act, &last);
          new_i++;
        }
    }

  /* If there is anything remaining in the build_v_must_defs list, simply
     emit it.  */
  for ( ; new_i < VEC_length (tree, build_v_must_defs); new_i++)
    add_mustdef_op (stmt, VEC_index (tree, build_v_must_defs, new_i), &last);

  last->next = NULL;

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr; ptr = ptr->next)
        delink_imm_use (MUSTDEF_KILL_PTR (ptr));
      old_ops->next = free_mustdefs;
      free_mustdefs = old_ops;
    }

  /* Now set the stmt's operands.  */
  MUSTDEF_OPS (stmt) = new_list.next;

#ifdef ENABLE_CHECKING
  {
    unsigned x = 0;
    for (ptr = MUSTDEF_OPS (stmt); ptr; ptr = ptr->next)
      x++;

    gcc_assert (x == VEC_length (tree, build_v_must_defs));
  }
#endif
}

static void
finalize_ssa_v_must_defs (tree stmt)
{
  /* In the presence of subvars, there may be more than one V_MUST_DEF
     per statement (one for each subvar).  It is a bit expensive to
     verify that all must-defs in a statement belong to subvars if
     there is more than one must-def, so we don't do it.  Suffice to
     say, if you reach here without having subvars, and have more than
     one must-def, you have hit a bug.  */
  finalize_ssa_v_must_def_ops (stmt);
  VEC_truncate (tree, build_v_must_defs, 0);
}


/* Finalize all the build vectors and attach the new operands to STMT.  */

static inline void
finalize_ssa_stmt_operands (tree stmt)
{
  finalize_ssa_defs (stmt);
  finalize_ssa_uses (stmt);
  finalize_ssa_v_must_defs (stmt);
  finalize_ssa_v_may_defs (stmt);
  finalize_ssa_vuses (stmt);
}


/* Start the process of building up the operand vectors.  */

static inline void
start_ssa_stmt_operands (void)
{
  gcc_assert (VEC_length (tree, build_defs) == 0);
  gcc_assert (VEC_length (tree, build_uses) == 0);
  gcc_assert (VEC_length (tree, build_vuses) == 0);
  gcc_assert (VEC_length (tree, build_v_may_defs) == 0);
  gcc_assert (VEC_length (tree, build_v_must_defs) == 0);
}


/* Add DEF_P to the list of pointers to operands.  */

static inline void
append_def (tree *def_p)
{
  VEC_safe_push (tree, heap, build_defs, (tree)def_p);
}


/* Add USE_P to the list of pointers to operands.  */

static inline void
append_use (tree *use_p)
{
  VEC_safe_push (tree, heap, build_uses, (tree)use_p);
}


/* Add a new virtual may def for variable VAR to the build array.  */

static inline void
append_v_may_def (tree var)
{
  if (TREE_CODE (var) != SSA_NAME)
    {
      var_ann_t ann = get_var_ann (var);

      /* Don't allow duplicate entries.  */
      if (ann->in_v_may_def_list)
        return;
      ann->in_v_may_def_list = 1;
    }

  VEC_safe_push (tree, heap, build_v_may_defs, (tree)var);
}


/* Add VAR to the list of virtual uses.  */

static inline void
append_vuse (tree var)
{
  /* Don't allow duplicate entries.  */
  if (TREE_CODE (var) != SSA_NAME)
    {
      var_ann_t ann = get_var_ann (var);

      if (ann->in_vuse_list || ann->in_v_may_def_list)
        return;
      ann->in_vuse_list = 1;
    }

  VEC_safe_push (tree, heap, build_vuses, (tree)var);
}


/* Add VAR to the list of virtual must definitions.  */

static inline void
append_v_must_def (tree var)
{
  unsigned i;

  /* Don't allow duplicate entries.  */
  for (i = 0; i < VEC_length (tree, build_v_must_defs); i++)
    if (var == VEC_index (tree, build_v_must_defs, i))
      return;

  VEC_safe_push (tree, heap, build_v_must_defs, (tree)var);
}


/* REF is a tree that contains the entire pointer dereference
   expression, if available, or NULL otherwise.  ALIAS is the variable
   we are asking if REF can access.  OFFSET and SIZE come from the
   memory access expression that generated this virtual operand.  */

static bool
access_can_touch_variable (tree ref, tree alias, HOST_WIDE_INT offset,
                           HOST_WIDE_INT size)
{
  bool offsetgtz = offset > 0;
  unsigned HOST_WIDE_INT uoffset = (unsigned HOST_WIDE_INT) offset;
  tree base = ref ? get_base_address (ref) : NULL;

  /* If ALIAS is .GLOBAL_VAR then the memory reference REF must be
     using a call-clobbered memory tag.  By definition, call-clobbered
     memory tags can always touch .GLOBAL_VAR.  */
  if (alias == global_var)
    return true;

  /* We cannot prune nonlocal aliases because they are not type
     specific.  */
  if (alias == nonlocal_all)
    return true;

  /* If ALIAS is an SFT, it can't be touched if the offset
     and size of the access is not overlapping with the SFT offset and
     size.  This is only true if we are accessing through a pointer
     to a type that is the same as SFT_PARENT_VAR.  Otherwise, we may
     be accessing through a pointer to some substruct of the
     structure, and if we try to prune there, we will have the wrong
     offset, and get the wrong answer.
     i.e., we can't prune without more work if we have something like

     struct gcc_target
     {
       struct asm_out
       {
         const char *byte_op;
         struct asm_int_op
         {
           const char *hi;
         } aligned_op;
       } asm_out;
     } targetm;

     foo = &targetm.asm_out.aligned_op;
     return foo->hi;

     SFT.1, which represents hi, will have SFT_OFFSET=32 because in
     terms of SFT_PARENT_VAR, that is where it is.
     However, the access through the foo pointer will be at offset 0.  */
  if (size != -1
      && TREE_CODE (alias) == STRUCT_FIELD_TAG
      && base
      && TREE_TYPE (base) == TREE_TYPE (SFT_PARENT_VAR (alias))
      && !overlap_subvar (offset, size, alias, NULL))
    {
#ifdef ACCESS_DEBUGGING
      fprintf (stderr, "Access to ");
      print_generic_expr (stderr, ref, 0);
      fprintf (stderr, " may not touch ");
      print_generic_expr (stderr, alias, 0);
      fprintf (stderr, " in function %s\n", get_name (current_function_decl));
#endif
      return false;
    }

  /* Without strict aliasing, it is impossible for a component access
     through a pointer to touch a random variable, unless that
     variable *is* a structure or a pointer.

     That is, given p->c, and some random global variable b,
     there is no legal way that p->c could be an access to b.

     Without strict aliasing on, we consider it legal to do something
     like:

     struct foos { int l; };
     int foo;
     static struct foos *getfoo(void);
     int main (void)
     {
       struct foos *f = getfoo();
       f->l = 1;
       foo = 2;
       if (f->l == 1)
         abort();
       exit(0);
     }
     static struct foos *getfoo(void)
     { return (struct foos *)&foo; }

     (taken from 20000623-1.c)

     The docs also say/imply that access through union pointers
     is legal (but *not* if you take the address of the union member,
     i.e. the inverse), such that you can do

     typedef union {
       int d;
     } U;

     int rv;
     void breakme()
     {
       U *rv0;
       U *pretmp = (U*)&rv;
       rv0 = pretmp;
       rv0->d = 42;
     }
     To implement this, we just punt on accesses through union
     pointers entirely.
  */
  else if (ref
           && flag_strict_aliasing
           && TREE_CODE (ref) != INDIRECT_REF
           && !MTAG_P (alias)
           && (TREE_CODE (base) != INDIRECT_REF
               || TREE_CODE (TREE_TYPE (base)) != UNION_TYPE)
           && !AGGREGATE_TYPE_P (TREE_TYPE (alias))
           && TREE_CODE (TREE_TYPE (alias)) != COMPLEX_TYPE
           && !POINTER_TYPE_P (TREE_TYPE (alias))
           /* When the struct has the may_alias attribute, its alias set
              is zero, and we must not prune the access.  */
           && get_alias_set (base))
    {
#ifdef ACCESS_DEBUGGING
      fprintf (stderr, "Access to ");
      print_generic_expr (stderr, ref, 0);
      fprintf (stderr, " may not touch ");
      print_generic_expr (stderr, alias, 0);
      fprintf (stderr, " in function %s\n", get_name (current_function_decl));
#endif
      return false;
    }

  /* If the offset of the access is greater than the size of one of
     the possible aliases, it can't be touching that alias, because it
     would be past the end of the structure.  */
  else if (ref
           && flag_strict_aliasing
           && TREE_CODE (ref) != INDIRECT_REF
           && !MTAG_P (alias)
           && !POINTER_TYPE_P (TREE_TYPE (alias))
           && offsetgtz
           && DECL_SIZE (alias)
           && TREE_CODE (DECL_SIZE (alias)) == INTEGER_CST
           && uoffset > TREE_INT_CST_LOW (DECL_SIZE (alias)))
    {
#ifdef ACCESS_DEBUGGING
      fprintf (stderr, "Access to ");
      print_generic_expr (stderr, ref, 0);
      fprintf (stderr, " may not touch ");
      print_generic_expr (stderr, alias, 0);
      fprintf (stderr, " in function %s\n", get_name (current_function_decl));
#endif
      return false;
    }

  return true;
}


/* Add VAR to the virtual operands array.  FLAGS is as in
   get_expr_operands.  FULL_REF is a tree that contains the entire
   pointer dereference expression, if available, or NULL otherwise.
   OFFSET and SIZE come from the memory access expression that
   generated this virtual operand.  FOR_CLOBBER is true if this is
   adding a virtual operand for a call clobber.  */

static void
add_virtual_operand (tree var, stmt_ann_t s_ann, int flags,
                     tree full_ref, HOST_WIDE_INT offset,
                     HOST_WIDE_INT size, bool for_clobber)
{
  VEC(tree,gc) *aliases;
  tree sym;
  var_ann_t v_ann;

  sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
  v_ann = var_ann (sym);

  /* Mark statements with volatile operands.  Optimizers should back
     off from statements having volatile operands.  */
  if (TREE_THIS_VOLATILE (sym) && s_ann)
    s_ann->has_volatile_ops = true;

  /* If the variable cannot be modified and this is a V_MAY_DEF change
     it into a VUSE.  This happens when read-only variables are marked
     call-clobbered and/or aliased to writable variables.  So we check
     that this only happens on non-specific stores.

     Note that if this is a specific store, i.e. associated with a
     modify_expr, then we can't suppress the V_MAY_DEF, lest we run
     into validation problems.

     This can happen when programs cast away const, leaving us with a
     store to read-only memory.  If the statement is actually executed
     at runtime, then the program is ill formed.  If the statement is
     not executed then all is well.  At the very least, we cannot ICE.  */
  if ((flags & opf_non_specific) && unmodifiable_var_p (var))
    flags &= ~(opf_is_def | opf_kill_def);

  /* The variable is not a GIMPLE register.  Add it (or its aliases) to
     virtual operands, unless the caller has specifically requested
     not to add virtual operands (used when adding operands inside an
     ADDR_EXPR expression).  */
  if (flags & opf_no_vops)
    return;

  aliases = v_ann->may_aliases;
  if (aliases == NULL)
    {
      /* The variable is not aliased or it is an alias tag.  */
      if (flags & opf_is_def)
        {
          if (flags & opf_kill_def)
            {
              /* V_MUST_DEF for non-aliased, non-GIMPLE register
                 variable definitions.  */
              gcc_assert (!MTAG_P (var)
                          || TREE_CODE (var) == STRUCT_FIELD_TAG);
              append_v_must_def (var);
            }
          else
            {
              /* Add a V_MAY_DEF for call-clobbered variables and
                 memory tags.  */
              append_v_may_def (var);
            }
        }
      else
        append_vuse (var);
    }
  else
    {
      unsigned i;
      tree al;

      /* The variable is aliased.  Add its aliases to the virtual
         operands.  */
      gcc_assert (VEC_length (tree, aliases) != 0);

      if (flags & opf_is_def)
        {
          bool none_added = true;

          for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
            {
              if (!access_can_touch_variable (full_ref, al, offset, size))
                continue;

              none_added = false;
              append_v_may_def (al);
            }

          /* If the variable is also an alias tag, add a virtual
             operand for it, otherwise we will miss representing
             references to the members of the variable's alias set.
             This fixes the bug in gcc.c-torture/execute/20020503-1.c.

             It is also necessary to add bare defs on clobbers for
             SMT's, so that bare SMT uses caused by pruning all the
             aliases will link up properly with calls.  In order to
             keep the number of these bare defs we add down to the
             minimum necessary, we keep track of which SMT's were used
             alone in statement vdefs or VUSEs.  */
          if (v_ann->is_aliased
              || none_added
              || (TREE_CODE (var) == SYMBOL_MEMORY_TAG
                  && for_clobber
                  && SMT_USED_ALONE (var)))
            {
              /* Every bare SMT def we add should have SMT_USED_ALONE
                 set on it, or else we will get the wrong answer on
                 clobbers.  Sadly, this assertion trips on code that
                 violates strict aliasing rules, because they *do* get
                 the clobbers wrong, since it is illegal code.  As a
                 result, we currently only enable it for aliasing
                 debugging.  Someone might wish to turn this code into
                 a nice strict-aliasing warning, since we *know* it
                 will get the wrong answer...  */
#ifdef ACCESS_DEBUGGING
              if (none_added
                  && !updating_used_alone && aliases_computed_p
                  && TREE_CODE (var) == SYMBOL_MEMORY_TAG)
                gcc_assert (SMT_USED_ALONE (var));
#endif
              append_v_may_def (var);
            }
        }
      else
        {
          bool none_added = true;
          for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
            {
              if (!access_can_touch_variable (full_ref, al, offset, size))
                continue;
              none_added = false;
              append_vuse (al);
            }

          /* Similarly, append a virtual use for VAR itself, when
             it is an alias tag.  */
          if (v_ann->is_aliased || none_added)
            append_vuse (var);
        }
    }
}


/* Add *VAR_P to the appropriate operand array for S_ANN.  FLAGS is as in
   get_expr_operands.  If *VAR_P is a GIMPLE register, it will be added to
   the statement's real operands, otherwise it is added to virtual
   operands.  */

static void
add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
{
  bool is_real_op;
  tree var, sym;
  var_ann_t v_ann;

  var = *var_p;
  gcc_assert (SSA_VAR_P (var));

  is_real_op = is_gimple_reg (var);

  /* If this is a real operand, the operand is either an SSA name or a
     decl.  Virtual operands may only be decls.  */
  gcc_assert (is_real_op || DECL_P (var));

  sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
  v_ann = var_ann (sym);

  /* Mark statements with volatile operands.  Optimizers should back
     off from statements having volatile operands.  */
  if (TREE_THIS_VOLATILE (sym) && s_ann)
    s_ann->has_volatile_ops = true;

  if (is_real_op)
    {
      /* The variable is a GIMPLE register.  Add it to real operands.  */
      if (flags & opf_is_def)
        append_def (var_p);
      else
        append_use (var_p);
    }
  else
    add_virtual_operand (var, s_ann, flags, NULL_TREE, 0, -1, false);
}


/* A subroutine of get_expr_operands to handle INDIRECT_REF,
   ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.

   STMT is the statement being processed, EXPR is the INDIRECT_REF
      that got us here.

   FLAGS is as in get_expr_operands.

   FULL_REF contains the full pointer dereference expression, if we
      have it, or NULL otherwise.

   OFFSET and SIZE are the location of the access inside the
      dereferenced pointer, if known.

   RECURSE_ON_BASE should be set to true if we want to continue
      calling get_expr_operands on the base pointer, and false if
      something else will do it for us.  */

static void
get_indirect_ref_operands (tree stmt, tree expr, int flags,
                           tree full_ref,
                           HOST_WIDE_INT offset, HOST_WIDE_INT size,
                           bool recurse_on_base)
{
  tree *pptr = &TREE_OPERAND (expr, 0);
  tree ptr = *pptr;
  stmt_ann_t s_ann = stmt_ann (stmt);

  /* Stores into INDIRECT_REF operands are never killing definitions.  */
  flags &= ~opf_kill_def;

  if (SSA_VAR_P (ptr))
    {
      struct ptr_info_def *pi = NULL;

      /* If PTR has flow-sensitive points-to information, use it.  */
      if (TREE_CODE (ptr) == SSA_NAME
          && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
          && pi->name_mem_tag)
        {
          /* PTR has its own memory tag.  Use it.  */
          add_virtual_operand (pi->name_mem_tag, s_ann, flags,
                               full_ref, offset, size, false);
        }
      else
        {
          /* If PTR is not an SSA_NAME or it doesn't have a name
             tag, use its symbol memory tag.  */
          var_ann_t v_ann;

          /* If we are emitting debugging dumps, display a warning if
             PTR is an SSA_NAME with no flow-sensitive alias
             information.  That means that we may need to compute
             aliasing again.  */
          if (dump_file
              && TREE_CODE (ptr) == SSA_NAME
              && pi == NULL)
            {
              fprintf (dump_file,
                  "NOTE: no flow-sensitive alias info for ");
              print_generic_expr (dump_file, ptr, dump_flags);
              fprintf (dump_file, " in ");
              print_generic_stmt (dump_file, stmt, dump_flags);
            }

          if (TREE_CODE (ptr) == SSA_NAME)
            ptr = SSA_NAME_VAR (ptr);
          v_ann = var_ann (ptr);

          if (v_ann->symbol_mem_tag)
            add_virtual_operand (v_ann->symbol_mem_tag, s_ann, flags,
                                 full_ref, offset, size, false);
        }
    }
  else if (TREE_CODE (ptr) == INTEGER_CST)
    {
      /* If a constant is used as a pointer, we can't generate a real
         operand for it but we mark the statement volatile to prevent
         optimizations from messing things up.  */
      if (s_ann)
        s_ann->has_volatile_ops = true;
      return;
    }
  else
    {
      /* Ok, this isn't even is_gimple_min_invariant.  Something's broken.  */
      gcc_unreachable ();
    }

  /* If requested, add a USE operand for the base pointer.  */
  if (recurse_on_base)
    get_expr_operands (stmt, pptr, opf_none);
}


/* A subroutine of get_expr_operands to handle TARGET_MEM_REF.  */

static void
get_tmr_operands (tree stmt, tree expr, int flags)
{
  tree tag = TMR_TAG (expr), ref;
  HOST_WIDE_INT offset, size, maxsize;
  subvar_t svars, sv;
  stmt_ann_t s_ann = stmt_ann (stmt);

  /* First record the real operands.  */
  get_expr_operands (stmt, &TMR_BASE (expr), opf_none);
  get_expr_operands (stmt, &TMR_INDEX (expr), opf_none);

  /* MEM_REFs should never be killing.  */
  flags &= ~opf_kill_def;

  if (TMR_SYMBOL (expr))
    {
      stmt_ann_t ann = stmt_ann (stmt);
      add_to_addressable_set (TMR_SYMBOL (expr), &ann->addresses_taken);
    }

  if (!tag)
    {
      /* Something weird, so ensure that we will be careful.  */
      stmt_ann (stmt)->has_volatile_ops = true;
      return;
    }

  if (DECL_P (tag))
    {
      get_expr_operands (stmt, &tag, flags);
      return;
    }

  ref = get_ref_base_and_extent (tag, &offset, &size, &maxsize);
  gcc_assert (ref != NULL_TREE);
  svars = get_subvars_for_var (ref);
  for (sv = svars; sv; sv = sv->next)
    {
      bool exact;
      if (overlap_subvar (offset, maxsize, sv->var, &exact))
        {
          int subvar_flags = flags;
          if (!exact || size != maxsize)
            subvar_flags &= ~opf_kill_def;
          add_stmt_operand (&sv->var, s_ann, subvar_flags);
        }
    }
}


/* Add clobbering definitions for .GLOBAL_VAR or for each of the call
   clobbered variables in the function.  */

static void
add_call_clobber_ops (tree stmt, tree callee)
{
  unsigned u;
  bitmap_iterator bi;
  stmt_ann_t s_ann = stmt_ann (stmt);
  bitmap not_read_b, not_written_b;

  /* Functions that are not const, pure, or noreturn may clobber
     call-clobbered variables.  */
  if (s_ann)
    s_ann->makes_clobbering_call = true;

  /* If we created .GLOBAL_VAR earlier, just use it.  See compute_may_aliases
     for the heuristic used to decide whether to create .GLOBAL_VAR or not.  */
  if (global_var)
    {
      add_stmt_operand (&global_var, s_ann, opf_is_def);
      return;
    }

  /* Get info for local and module level statics.  There is a bit
     set for each static if the call being processed does not read
     or write that variable.  */
  not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
  not_written_b = callee ? ipa_reference_get_not_written_global (callee) : NULL;
  /* Add a V_MAY_DEF operand for every call clobbered variable.  */
  EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
    {
      tree var = referenced_var_lookup (u);
      unsigned int escape_mask = var_ann (var)->escape_mask;
      tree real_var = var;
      bool not_read;
      bool not_written;

      /* Not read and not written are computed on regular vars, not
         subvars, so look at the parent var if this is an SFT.  */
      if (TREE_CODE (var) == STRUCT_FIELD_TAG)
        real_var = SFT_PARENT_VAR (var);

      not_read = not_read_b ? bitmap_bit_p (not_read_b,
                                            DECL_UID (real_var)) : false;
      not_written = not_written_b ? bitmap_bit_p (not_written_b,
                                                  DECL_UID (real_var)) : false;
      gcc_assert (!unmodifiable_var_p (var));

      clobber_stats.clobbered_vars++;

      /* See if this variable is really clobbered by this function.  */

      /* Trivial case: Things escaping only to pure/const are not
         clobbered by non-pure-const, and only read by pure/const.  */
      if ((escape_mask & ~(ESCAPE_TO_PURE_CONST)) == 0)
        {
          tree call = get_call_expr_in (stmt);
          if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
            {
              add_stmt_operand (&var, s_ann, opf_none);
              clobber_stats.unescapable_clobbers_avoided++;
              continue;
            }
          else
            {
              clobber_stats.unescapable_clobbers_avoided++;
              continue;
            }
        }

      if (not_written)
        {
          clobber_stats.static_write_clobbers_avoided++;
          if (!not_read)
            add_stmt_operand (&var, s_ann, opf_none);
          else
            clobber_stats.static_read_clobbers_avoided++;
        }
      else
        add_virtual_operand (var, s_ann, opf_is_def, NULL, 0, -1, true);
    }
}


/* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
   function.  */

static void
add_call_read_ops (tree stmt, tree callee)
{
  unsigned u;
  bitmap_iterator bi;
  stmt_ann_t s_ann = stmt_ann (stmt);
  bitmap not_read_b;

  /* If the function is not pure, it may reference memory.  Add
     a VUSE for .GLOBAL_VAR if it has been created.  See add_referenced_var
     for the heuristic used to decide whether to create .GLOBAL_VAR.  */
  if (global_var)
    {
      add_stmt_operand (&global_var, s_ann, opf_none);
      return;
    }

  not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;

  /* Add a VUSE for each call-clobbered variable.  */
  EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
    {
      tree var = referenced_var (u);
      tree real_var = var;
      bool not_read;

      clobber_stats.readonly_clobbers++;

      /* Not read and not written are computed on regular vars, not
         subvars, so look at the parent var if this is an SFT.  */
      if (TREE_CODE (var) == STRUCT_FIELD_TAG)
        real_var = SFT_PARENT_VAR (var);

      not_read = not_read_b ? bitmap_bit_p (not_read_b, DECL_UID (real_var))
                            : false;

      if (not_read)
        {
          clobber_stats.static_readonly_clobbers_avoided++;
          continue;
        }

      add_stmt_operand (&var, s_ann, opf_none | opf_non_specific);
    }
}
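
/* A sketch of the effect (names illustrative): for a call to a pure
   function

     x_4 = bar ();

   each call-clobbered variable G gets a VUSE rather than a V_MAY_DEF:

     # VUSE <G_2>
     x_4 = bar ();

   unless IPA reference analysis proved that bar does not read G.  */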


/* A subroutine of get_expr_operands to handle CALL_EXPR.  */

static void
get_call_expr_operands (tree stmt, tree expr)
{
  tree op;
  int call_flags = call_expr_flags (expr);

  /* If aliases have been computed already, add V_MAY_DEF or V_USE
     operands for all the symbols that have been found to be
     call-clobbered.

     Note that if aliases have not been computed, the global effects
     of calls will not be included in the SSA web.  This is fine
     because no optimizer should run before aliases have been
     computed.  By not bothering with virtual operands for CALL_EXPRs
     we avoid adding superfluous virtual operands, which can be a
     significant compile time sink (See PR 15855).  */
  if (aliases_computed_p
      && !bitmap_empty_p (call_clobbered_vars)
      && !(call_flags & ECF_NOVOPS))
    {
      /* A 'pure' or a 'const' function never call-clobbers anything.
         A 'noreturn' function might, but since we don't return anyway
         there is no point in recording that.  */
      if (TREE_SIDE_EFFECTS (expr)
          && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
        add_call_clobber_ops (stmt, get_callee_fndecl (expr));
      else if (!(call_flags & ECF_CONST))
        add_call_read_ops (stmt, get_callee_fndecl (expr));
    }

  /* Find uses in the called function.  */
  get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);

  for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
    get_expr_operands (stmt, &TREE_VALUE (op), opf_none);

  get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
}


/* Scan operands in the ASM_EXPR statement STMT.  */

static void
get_asm_expr_operands (tree stmt)
{
  stmt_ann_t s_ann = stmt_ann (stmt);
  int noutputs = list_length (ASM_OUTPUTS (stmt));
  const char **oconstraints
    = (const char **) alloca ((noutputs) * sizeof (const char *));
  int i;
  tree link;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  for (i = 0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
    {
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
                               &allows_reg, &is_inout);

      /* This should have been split in gimplify_asm_expr.  */
      gcc_assert (!allows_reg || !is_inout);

      /* Memory operands are addressable.  Note that STMT needs the
         address of this operand.  */
      if (!allows_reg && allows_mem)
        {
          tree t = get_base_address (TREE_VALUE (link));
          if (t && DECL_P (t) && s_ann)
            add_to_addressable_set (t, &s_ann->addresses_taken);
        }

      get_expr_operands (stmt, &TREE_VALUE (link), opf_is_def);
    }

  for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
    {
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
                              oconstraints, &allows_mem, &allows_reg);

      /* Memory operands are addressable.  Note that STMT needs the
         address of this operand.  */
      if (!allows_reg && allows_mem)
        {
          tree t = get_base_address (TREE_VALUE (link));
          if (t && DECL_P (t) && s_ann)
            add_to_addressable_set (t, &s_ann->addresses_taken);
        }

      get_expr_operands (stmt, &TREE_VALUE (link), 0);
    }

  /* Clobber memory for asm ("" : : : "memory");  */
  for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
    if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
      {
        unsigned i;
        bitmap_iterator bi;

        /* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
           decided to group them).  */
        if (global_var)
          add_stmt_operand (&global_var, s_ann, opf_is_def);
        else
          EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
            {
              tree var = referenced_var (i);
              add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
            }

        /* Now clobber all addressables.  */
        EXECUTE_IF_SET_IN_BITMAP (addressable_vars, 0, i, bi)
          {
            tree var = referenced_var (i);

            /* Subvars are explicitly represented in this list, so
               we don't need the original to be added to the clobber
               ops, but the original *will* be in this list because
               we keep the addressability of the original
               variable up-to-date so we don't screw up the rest of
               the backend.  */
            if (var_can_have_subvars (var)
                && get_subvars_for_var (var) != NULL)
              continue;

            add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
          }

        break;
      }
}
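
/* A sketch of the "memory" clobber handling above (illustrative):

     __asm__ __volatile__ ("" : : : "memory");

   receives a V_MAY_DEF for every call-clobbered and addressable
   variable, which is what prevents loads and stores from being
   moved across the asm.  */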


/* Scan operands for the assignment expression EXPR in statement STMT.  */

static void
get_modify_expr_operands (tree stmt, tree expr)
{
  /* First get operands from the RHS.  */
  get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);

  /* For the LHS, use a regular definition (OPF_IS_DEF) for GIMPLE
     registers.  If the LHS is a store to memory, we will need either
     a preserving definition (V_MAY_DEF) or a killing definition
     (V_MUST_DEF).

     Preserving definitions are those that modify a part of an
     aggregate object for which no subvars have been computed (or the
     reference does not correspond exactly to one of them).  Stores
     through a pointer are also represented with V_MAY_DEF operators.

     The determination of whether to use a preserving or a killing
     definition is done while scanning the LHS of the assignment.  By
     default, assume that we will emit a V_MUST_DEF.  */
  get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_is_def | opf_kill_def);
}
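
/* A sketch of the distinction above (SSA names illustrative): given

     struct { int a; int b; } s;

   the partial store 's.a = x_1' leaves s.b live, so it is scanned as
   a preserving definition, while the whole-object store 's = t_2'
   kills the previous value (assuming no subvars were computed for
   's'):

     # s_3 = V_MAY_DEF <s_2>          # s_5 = V_MUST_DEF <s_4>
     s.a = x_1;                       s = t_2;                    */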


/* Recursively scan the expression pointed to by EXPR_P in statement
   STMT.  FLAGS is one of the OPF_* constants modifying how to
   interpret the operands found.  */

static void
get_expr_operands (tree stmt, tree *expr_p, int flags)
{
  enum tree_code code;
  enum tree_code_class class;
  tree expr = *expr_p;
  stmt_ann_t s_ann = stmt_ann (stmt);

  if (expr == NULL)
    return;

  code = TREE_CODE (expr);
  class = TREE_CODE_CLASS (code);

  switch (code)
    {
    case ADDR_EXPR:
      /* Taking the address of a variable does not represent a
         reference to it, but the fact that the statement takes its
         address will be of interest to some passes (e.g. alias
         resolution).  */
      add_to_addressable_set (TREE_OPERAND (expr, 0), &s_ann->addresses_taken);

      /* If the address is invariant, there may be no interesting
         variable references inside.  */
      if (is_gimple_min_invariant (expr))
        return;

      /* Otherwise, there may be variables referenced inside but there
         should be no VUSEs created, since the referenced objects are
         not really accessed.  The only operands that we should find
         here are ARRAY_REF indices which will always be real operands
         (GIMPLE does not allow non-registers as array indices).  */
      flags |= opf_no_vops;
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;
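
      /* For example (names illustrative): scanning

           p_2 = &a[i_7];

         adds a real USE of i_7 but no VUSE of 'a', since 'a' itself
         is not read by this statement.  */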

    case SSA_NAME:
    case STRUCT_FIELD_TAG:
    case SYMBOL_MEMORY_TAG:
    case NAME_MEMORY_TAG:
      add_stmt_operand (expr_p, s_ann, flags);
      return;

    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      {
        subvar_t svars;

        /* Add the subvars for a variable, if it has subvars, to DEFS
           or USES.  Otherwise, add the variable itself.  Whether it
           goes to USES or DEFS depends on the operand flags.  */
        if (var_can_have_subvars (expr)
            && (svars = get_subvars_for_var (expr)))
          {
            subvar_t sv;
            for (sv = svars; sv; sv = sv->next)
              add_stmt_operand (&sv->var, s_ann, flags);
          }
        else
          add_stmt_operand (expr_p, s_ann, flags);

        return;
      }

    case MISALIGNED_INDIRECT_REF:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
      /* fall through */

    case ALIGN_INDIRECT_REF:
    case INDIRECT_REF:
      get_indirect_ref_operands (stmt, expr, flags, NULL_TREE, 0, -1, true);
      return;

    case TARGET_MEM_REF:
      get_tmr_operands (stmt, expr, flags);
      return;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
        tree ref;
        HOST_WIDE_INT offset, size, maxsize;
        bool none = true;

        /* This component reference becomes an access to all of the
           subvariables it can touch, if we can determine that, but
           *NOT* the real one.  If we can't determine which fields we
           could touch, the recursion will eventually get to a
           variable and add *all* of its subvars, or whatever is the
           minimum correct subset.  */
        ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize);
        if (SSA_VAR_P (ref) && get_subvars_for_var (ref))
          {
            subvar_t sv;
            subvar_t svars = get_subvars_for_var (ref);

            for (sv = svars; sv; sv = sv->next)
              {
                bool exact;

                if (overlap_subvar (offset, maxsize, sv->var, &exact))
                  {
                    int subvar_flags = flags;
                    none = false;
                    if (!exact || size != maxsize)
                      subvar_flags &= ~opf_kill_def;
                    add_stmt_operand (&sv->var, s_ann, subvar_flags);
                  }
              }

            if (!none)
              flags |= opf_no_vops;
          }
        else if (TREE_CODE (ref) == INDIRECT_REF)
          {
            get_indirect_ref_operands (stmt, ref, flags, expr, offset,
                                       maxsize, false);
            flags |= opf_no_vops;
          }

        /* Even if we found subvars above, we need to make sure to see
           immediate uses for d in s.a[d].  If s.a has subvars, we
           would miss it otherwise.  */
        get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
                           flags & ~opf_kill_def);

        if (code == COMPONENT_REF)
          {
            if (s_ann && TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
              s_ann->has_volatile_ops = true;
            get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
          }
        else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
          {
            get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
            get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
            get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none);
          }

        return;
      }

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
         and an rvalue reference to its second argument.  */
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case CALL_EXPR:
      get_call_expr_operands (stmt, expr);
      return;

    case COND_EXPR:
    case VEC_COND_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
      return;

    case MODIFY_EXPR:
      get_modify_expr_operands (stmt, expr);
      return;

    case CONSTRUCTOR:
      {
        /* General aggregate CONSTRUCTORs have been decomposed, but they
           are still in use as the COMPLEX_EXPR equivalent for vectors.  */
        constructor_elt *ce;
        unsigned HOST_WIDE_INT idx;

        for (idx = 0;
             VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
             idx++)
          get_expr_operands (stmt, &ce->value, opf_none);

        return;
      }

    case BIT_FIELD_REF:
      /* Stores using BIT_FIELD_REF are always preserving definitions.  */
      flags &= ~opf_kill_def;

      /* Fallthru  */

    case TRUTH_NOT_EXPR:
    case VIEW_CONVERT_EXPR:
    do_unary:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case COMPOUND_EXPR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    do_binary:
      {
        get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
        get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
        return;
      }

    case DOT_PROD_EXPR:
    case REALIGN_LOAD_EXPR:
      {
        get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
        get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
        get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
        return;
      }

    case BLOCK:
    case FUNCTION_DECL:
    case EXC_PTR_EXPR:
    case FILTER_EXPR:
    case LABEL_DECL:
    case CONST_DECL:
    case OMP_PARALLEL:
    case OMP_SECTIONS:
    case OMP_FOR:
    case OMP_SINGLE:
    case OMP_MASTER:
    case OMP_ORDERED:
    case OMP_CRITICAL:
    case OMP_RETURN:
    case OMP_CONTINUE:
      /* Expressions that make no memory references.  */
      return;

    default:
      if (class == tcc_unary)
        goto do_unary;
      if (class == tcc_binary || class == tcc_comparison)
        goto do_binary;
      if (class == tcc_constant || class == tcc_type)
        return;
    }

  /* If we get here, something has gone wrong.  */
#ifdef ENABLE_CHECKING
  fprintf (stderr, "unhandled expression in get_expr_operands():\n");
  debug_tree (expr);
  fputs ("\n", stderr);
#endif
  gcc_unreachable ();
}


/* Parse STMT looking for operands.  When finished, the various
   build_* operand vectors will have potential operands in them.  */

static void
parse_ssa_operands (tree stmt)
{
  enum tree_code code;

  code = TREE_CODE (stmt);
  switch (code)
    {
    case MODIFY_EXPR:
      get_modify_expr_operands (stmt, stmt);
      break;

    case COND_EXPR:
      get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_none);
      break;

    case SWITCH_EXPR:
      get_expr_operands (stmt, &SWITCH_COND (stmt), opf_none);
      break;

    case ASM_EXPR:
      get_asm_expr_operands (stmt);
      break;

    case RETURN_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_none);
      break;

    case GOTO_EXPR:
      get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_none);
      break;

    case LABEL_EXPR:
      get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_none);
      break;

    case BIND_EXPR:
    case CASE_LABEL_EXPR:
    case TRY_CATCH_EXPR:
    case TRY_FINALLY_EXPR:
    case EH_FILTER_EXPR:
    case CATCH_EXPR:
    case RESX_EXPR:
      /* These nodes contain no variable references.  */
      break;

    default:
      /* Notice that if get_expr_operands tries to use &STMT as the
         operand pointer (which may only happen for USE operands), we
         will fail in add_stmt_operand.  This default will handle
         statements like empty statements, or CALL_EXPRs that may
         appear on the RHS of a statement or as statements themselves.  */
      get_expr_operands (stmt, &stmt, opf_none);
      break;
    }
}


/* Create an operands cache for STMT.  */

static void
build_ssa_operands (tree stmt)
{
  stmt_ann_t ann = get_stmt_ann (stmt);

  /* Initially assume that the statement has no volatile operands and
     does not take the address of any symbols.  */
  if (ann)
    {
      ann->has_volatile_ops = false;
      if (ann->addresses_taken)
        ann->addresses_taken = NULL;
    }

  start_ssa_stmt_operands ();

  parse_ssa_operands (stmt);
  operand_build_sort_virtual (build_vuses);
  operand_build_sort_virtual (build_v_may_defs);
  operand_build_sort_virtual (build_v_must_defs);

  finalize_ssa_stmt_operands (stmt);
}


/* Free any operand vectors in OPS.  */

void
free_ssa_operands (stmt_operands_p ops)
{
  ops->def_ops = NULL;
  ops->use_ops = NULL;
  ops->maydef_ops = NULL;
  ops->mustdef_ops = NULL;
  ops->vuse_ops = NULL;
}


/* Get the operands of statement STMT.  */

void
update_stmt_operands (tree stmt)
{
  stmt_ann_t ann = get_stmt_ann (stmt);

  /* If update_stmt_operands is called before SSA is initialized, do
     nothing.  */
  if (!ssa_operands_active ())
    return;

  /* The optimizers cannot handle statements that are nothing but a
     _DECL.  This indicates a bug in the gimplifier.  */
  gcc_assert (!SSA_VAR_P (stmt));

  gcc_assert (ann->modified);

  timevar_push (TV_TREE_OPS);

  build_ssa_operands (stmt);

  /* Clear the modified bit for STMT.  */
  ann->modified = 0;

  timevar_pop (TV_TREE_OPS);
}
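
/* A typical usage sketch (illustrative): passes normally reach this
   function through the update_stmt wrapper in tree-flow-inline.h
   after rewriting a statement in place:

     TREE_OPERAND (stmt, 1) = new_rhs;
     update_stmt (stmt);

   update_stmt marks the statement modified and then calls
   update_stmt_operands to rebuild the operand cache.  */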


/* Copies virtual operands from SRC to DEST.  */

void
copy_virtual_operands (tree dest, tree src)
{
  tree t;
  ssa_op_iter iter, old_iter;
  use_operand_p use_p, u2;
  def_operand_p def_p, d2;

  build_ssa_operands (dest);

  /* Copy all the virtual fields.  */
  FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VUSE)
    append_vuse (t);
  FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMAYDEF)
    append_v_may_def (t);
  FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMUSTDEF)
    append_v_must_def (t);

  if (VEC_length (tree, build_vuses) == 0
      && VEC_length (tree, build_v_may_defs) == 0
      && VEC_length (tree, build_v_must_defs) == 0)
    return;

  /* Now commit the virtual operands to this stmt.  */
  finalize_ssa_v_must_defs (dest);
  finalize_ssa_v_may_defs (dest);
  finalize_ssa_vuses (dest);

  /* Finally, set the fields to the same values as the originals.  */
  t = op_iter_init_tree (&old_iter, src, SSA_OP_VUSE);
  FOR_EACH_SSA_USE_OPERAND (use_p, dest, iter, SSA_OP_VUSE)
    {
      gcc_assert (!op_iter_done (&old_iter));
      SET_USE (use_p, t);
      t = op_iter_next_tree (&old_iter);
    }
  gcc_assert (op_iter_done (&old_iter));

  op_iter_init_maydef (&old_iter, src, &u2, &d2);
  FOR_EACH_SSA_MAYDEF_OPERAND (def_p, use_p, dest, iter)
    {
      gcc_assert (!op_iter_done (&old_iter));
      SET_USE (use_p, USE_FROM_PTR (u2));
      SET_DEF (def_p, DEF_FROM_PTR (d2));
      op_iter_next_maymustdef (&u2, &d2, &old_iter);
    }
  gcc_assert (op_iter_done (&old_iter));

  op_iter_init_mustdef (&old_iter, src, &u2, &d2);
  FOR_EACH_SSA_MUSTDEF_OPERAND (def_p, use_p, dest, iter)
    {
      gcc_assert (!op_iter_done (&old_iter));
      SET_USE (use_p, USE_FROM_PTR (u2));
      SET_DEF (def_p, DEF_FROM_PTR (d2));
      op_iter_next_maymustdef (&u2, &d2, &old_iter);
    }
  gcc_assert (op_iter_done (&old_iter));
}


/* Specifically for use in DOM's expression analysis.  Given a store
   OLD_STMT, create an artificial statement NEW_STMT which looks like a
   load of the values stored; this can be used to eliminate redundant
   loads.  */

void
create_ssa_artficial_load_stmt (tree new_stmt, tree old_stmt)
{
  stmt_ann_t ann;
  tree op;
  ssa_op_iter iter;
  use_operand_p use_p;
  unsigned x;

  ann = get_stmt_ann (new_stmt);

  /* Process the stmt looking for operands.  */
  start_ssa_stmt_operands ();
  parse_ssa_operands (new_stmt);

  for (x = 0; x < VEC_length (tree, build_vuses); x++)
    {
      tree t = VEC_index (tree, build_vuses, x);
      if (TREE_CODE (t) != SSA_NAME)
        {
          var_ann_t ann = var_ann (t);
          ann->in_vuse_list = 0;
        }
    }

  for (x = 0; x < VEC_length (tree, build_v_may_defs); x++)
    {
      tree t = VEC_index (tree, build_v_may_defs, x);
      if (TREE_CODE (t) != SSA_NAME)
        {
          var_ann_t ann = var_ann (t);
          ann->in_v_may_def_list = 0;
        }
    }

  /* Remove any virtual operands that were found.  */
  VEC_truncate (tree, build_v_may_defs, 0);
  VEC_truncate (tree, build_v_must_defs, 0);
  VEC_truncate (tree, build_vuses, 0);

  /* For each VDEF on the original statement, we want to create a
     VUSE of the V_MAY_DEF result or V_MUST_DEF op on the new
     statement.  */
  FOR_EACH_SSA_TREE_OPERAND (op, old_stmt, iter,
                             (SSA_OP_VMAYDEF | SSA_OP_VMUSTDEF))
    append_vuse (op);

  /* Now build the operands for this new stmt.  */
  finalize_ssa_stmt_operands (new_stmt);

  /* All uses in this fake stmt must not be in the immediate use lists.  */
  FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES)
    delink_imm_use (use_p);
}


/* Swap operands EXP0 and EXP1 in statement STMT.  No attempt is made
   to test the validity of the swap operation.  */

void
swap_tree_operands (tree stmt, tree *exp0, tree *exp1)
{
  tree op0, op1;
  op0 = *exp0;
  op1 = *exp1;

  /* If the operand cache is active, attempt to preserve the relative
     positions of these two operands in their respective immediate use
     lists.  */
  if (ssa_operands_active () && op0 != op1)
    {
      use_optype_p use0, use1, ptr;
      use0 = use1 = NULL;

      /* Find the 2 operands in the cache, if they are there.  */
      for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
        if (USE_OP_PTR (ptr)->use == exp0)
          {
            use0 = ptr;
            break;
          }

      for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
        if (USE_OP_PTR (ptr)->use == exp1)
          {
            use1 = ptr;
            break;
          }

      /* If we could not find both operands in the cache, there isn't
         much we can do at this point.  Presumably we don't need to
         worry about it.  */
      if (use0 && use1)
        {
          tree *tmp = USE_OP_PTR (use1)->use;
          USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
          USE_OP_PTR (use0)->use = tmp;
        }
    }

  /* Now swap the data.  */
  *exp0 = op1;
  *exp1 = op0;
}
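
/* A possible usage sketch (illustrative; tree_swap_operands_p is the
   fold-const.c predicate deciding whether a swap is worthwhile):

     if (tree_swap_operands_p (TREE_OPERAND (rhs, 0),
                               TREE_OPERAND (rhs, 1), false))
       swap_tree_operands (stmt, &TREE_OPERAND (rhs, 0),
                           &TREE_OPERAND (rhs, 1));

   Doing the swap through swap_tree_operands, rather than by hand,
   keeps the immediate use lists maintained by the operand cache
   consistent.  */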


/* Add the base address of REF to the set *ADDRESSES_TAKEN.  If
   *ADDRESSES_TAKEN is NULL, a new set is created.  REF may be
   a single variable whose address has been taken or any other valid
   GIMPLE memory reference (structure reference, array, etc).  If the
   base address of REF is a decl that has sub-variables, also add all
   of its sub-variables.  */

void
add_to_addressable_set (tree ref, bitmap *addresses_taken)
{
  tree var;
  subvar_t svars;

  gcc_assert (addresses_taken);

  /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
     as the only thing we take the address of.  If VAR is a structure,
     taking the address of a field means that the whole structure may
     be referenced using pointer arithmetic.  See PR 21407 and the
     ensuing mailing list discussion.  */
  var = get_base_address (ref);
  if (var && SSA_VAR_P (var))
    {
      if (*addresses_taken == NULL)
        *addresses_taken = BITMAP_GGC_ALLOC ();

      if (var_can_have_subvars (var)
          && (svars = get_subvars_for_var (var)))
        {
          subvar_t sv;
          for (sv = svars; sv; sv = sv->next)
            {
              bitmap_set_bit (*addresses_taken, DECL_UID (sv->var));
              TREE_ADDRESSABLE (sv->var) = 1;
            }
        }
      else
        {
          bitmap_set_bit (*addresses_taken, DECL_UID (var));
          TREE_ADDRESSABLE (var) = 1;
        }
    }
}
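
/* A sketch of why the base, not just the field, is recorded (see the
   PR 21407 note above): given

     struct S { int f; int g; } s;
     int *p = &s.f;

   all of 's' (or all of its subvariables) is marked addressable,
   because pointer arithmetic on p could be used to reach other parts
   of 's', so the alias machinery must treat the whole object as
   exposed.  */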


/* Scan the immediate_use list for VAR, making sure it is linked
   properly.  Return TRUE if there is a problem and emit an error
   message to F.  */

bool
verify_imm_links (FILE *f, tree var)
{
  use_operand_p ptr, prev, list;
  int count;

  gcc_assert (TREE_CODE (var) == SSA_NAME);

  list = &(SSA_NAME_IMM_USE_NODE (var));
  gcc_assert (list->use == NULL);

  if (list->prev == NULL)
    {
      gcc_assert (list->next == NULL);
      return false;
    }

  prev = list;
  count = 0;
  for (ptr = list->next; ptr != list; )
    {
      if (prev != ptr->prev)
        goto error;

      if (ptr->use == NULL)
        goto error; /* 2 roots, or SAFE guard node.  */
      else if (*(ptr->use) != var)
        goto error;

      prev = ptr;
      ptr = ptr->next;

      /* Avoid infinite loops.  50,000,000 uses probably indicates a
         problem.  */
      if (count++ > 50000000)
        goto error;
    }

  /* Verify list in the other direction.  */
  prev = list;
  for (ptr = list->prev; ptr != list; )
    {
      if (prev != ptr->next)
        goto error;
      prev = ptr;
      ptr = ptr->prev;
      if (count-- < 0)
        goto error;
    }

  if (count != 0)
    goto error;

  return false;

 error:
  if (ptr->stmt && stmt_modified_p (ptr->stmt))
    {
      fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->stmt);
      print_generic_stmt (f, ptr->stmt, TDF_SLIM);
    }
  fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
           (void *)ptr->use);
  print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
  fprintf (f, "\n");
  return true;
}


/* Dump all the immediate uses of VAR to FILE.  */

void
dump_immediate_uses_for (FILE *file, tree var)
{
  imm_use_iterator iter;
  use_operand_p use_p;

  gcc_assert (var && TREE_CODE (var) == SSA_NAME);

  print_generic_expr (file, var, TDF_SLIM);
  fprintf (file, " : -->");
  if (has_zero_uses (var))
    fprintf (file, " no uses.\n");
  else
    if (has_single_use (var))
      fprintf (file, " single use.\n");
    else
      fprintf (file, "%d uses.\n", num_imm_uses (var));

  FOR_EACH_IMM_USE_FAST (use_p, iter, var)
    {
      if (use_p->stmt == NULL && use_p->use == NULL)
        fprintf (file, "***end of stmt iterator marker***\n");
      else
        if (!is_gimple_reg (USE_FROM_PTR (use_p)))
          print_generic_stmt (file, USE_STMT (use_p), TDF_VOPS);
        else
          print_generic_stmt (file, USE_STMT (use_p), TDF_SLIM);
    }
  fprintf (file, "\n");
}


/* Dump all the immediate uses to FILE.  */

void
dump_immediate_uses (FILE *file)
{
  tree var;
  unsigned int x;

  fprintf (file, "Immediate_uses: \n\n");
  for (x = 1; x < num_ssa_names; x++)
    {
      var = ssa_name (x);
      if (!var)
        continue;
      dump_immediate_uses_for (file, var);
    }
}


/* Dump def-use edges on stderr.  */

void
debug_immediate_uses (void)
{
  dump_immediate_uses (stderr);
}


/* Dump def-use edges for VAR on stderr.  */

void
debug_immediate_uses_for (tree var)
{
  dump_immediate_uses_for (stderr, var);
}

#include "gt-tree-ssa-operands.h"
