/* SSA operands management for trees.
   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "ggc.h"
#include "timevar.h"
#include "langhooks.h"
#include "diagnostic-core.h"


/* This file contains the code required to manage the operands cache of the
   SSA optimizer.  For every stmt, we maintain an operand cache in the stmt
   annotation.  This cache contains operands that will be of interest to
   optimizers and other passes wishing to manipulate the IL.

   The operand types are broken up into REAL and VIRTUAL operands.  The real
   operands are represented as pointers into the stmt's operand tree.  Thus
   any manipulation of the real operands will be reflected in the actual tree.
   Virtual operands are represented solely in the cache, although the base
   variable for the SSA_NAME may or may not occur in the stmt's tree.
   Manipulation of the virtual operands will not be reflected in the stmt tree.

   The routines in this file are concerned with creating this operand cache
   from a stmt tree.

   The operand tree is then parsed by the various get_* routines which look
   through the stmt tree for the occurrence of operands which may be of
   interest, and calls are made to the append_* routines whenever one is
   found.  There are 4 of these routines, each representing one of the
   4 types of operands: Defs, Uses, Virtual Uses, and Virtual May Defs.

   The append_* routines check for duplication, and simply keep a list of
   unique objects for each operand type in the build_* extendable vectors.

   Once the stmt tree is completely parsed, the finalize_ssa_operands()
   routine is called, which proceeds to perform the finalization routine
   on each of the 4 operand vectors which have been built up.

   If the stmt had a previous operand cache, the finalization routines
   attempt to match up the new operands with the old ones.  If it's a perfect
   match, the old vector is simply reused.  If it isn't a perfect match, then
   a new vector is created and the new operands are placed there.  For
   virtual operands, if the previous cache had an SSA_NAME version of a
   variable, and that same variable occurs in the same operands cache, then
   the new cache vector will also get the same SSA_NAME.

   i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new
   operand vector for VUSE, then the new vector will also be modified
   such that it contains 'a_5' rather than 'a'.  */
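
/* Illustrative sketch (a simplified model, not GCC code): the cache
   can be pictured as two singly linked operand lists hanging off each
   statement, holding pointers back into the statement so that real
   operands can be rewritten in place.  The names below are
   hypothetical:

     struct use_op { struct use_op *next; tree *use; };
     struct def_op { struct def_op *next; tree *def; };

     struct operand_cache
     {
       struct def_op *defs;   // real defs: pointers into the stmt
       struct use_op *uses;   // real uses: pointers into the stmt
       tree vdef, vuse;       // at most one virtual def/use (.MEM)
     };

   Rebuilding a statement gathers candidates into the build_* vectors
   and then either reuses the old lists, when they still match, or
   allocates fresh entries, as finalize_ssa_defs and finalize_ssa_uses
   below do.  */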

/* Structure storing statistics on how many call clobbers we have, and
   how many were avoided.  */

static struct
{
  /* Number of call-clobbered ops we attempt to add to calls in
     add_call_clobbered_mem_symbols.  */
  unsigned int clobbered_vars;

  /* Number of write-clobbers (VDEFs) avoided by using
     not_written information.  */
  unsigned int static_write_clobbers_avoided;

  /* Number of reads (VUSEs) avoided by using not_read information.  */
  unsigned int static_read_clobbers_avoided;

  /* Number of write-clobbers avoided because the variable can't escape to
     this call.  */
  unsigned int unescapable_clobbers_avoided;

  /* Number of read-only uses we attempt to add to calls in
     add_call_read_mem_symbols.  */
  unsigned int readonly_clobbers;

  /* Number of read-only uses we avoid using not_read information.  */
  unsigned int static_readonly_clobbers_avoided;
} clobber_stats;


/* Flags to describe operand properties in helpers.  */

/* By default, operands are loaded.  */
#define opf_use         0

/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
#define opf_def         (1 << 0)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops     (1 << 1)

/* Operand is an implicit reference.  This is used to distinguish
   explicit assignments in the form of MODIFY_EXPR from
   clobbering sites like function calls or ASM_EXPRs.  */
#define opf_implicit    (1 << 2)

/* Operand is in a place where address-taken does not imply addressable.  */
#define opf_non_addressable (1 << 3)

/* Operand is in a place where opf_non_addressable does not apply.  */
#define opf_not_non_addressable (1 << 4)

/* Array for building all the def operands.  */
static VEC(tree,heap) *build_defs;

/* Array for building all the use operands.  */
static VEC(tree,heap) *build_uses;

/* The built VDEF operand.  */
static tree build_vdef;

/* The built VUSE operand.  */
static tree build_vuse;

/* Bitmap obstack for our data structures that need to survive across
   compilations of multiple functions.  */
static bitmap_obstack operands_bitmap_obstack;

static void get_expr_operands (gimple, tree *, int);

/* Number of functions with initialized ssa_operands.  */
static int n_initialized = 0;

/* Return the DECL_UID of the base variable of T.  */

static inline unsigned
get_name_decl (const_tree t)
{
  if (TREE_CODE (t) != SSA_NAME)
    return DECL_UID (t);
  else
    return DECL_UID (SSA_NAME_VAR (t));
}


/* Return true if the SSA operands cache is active.  */

bool
ssa_operands_active (void)
{
  /* This function may be invoked from contexts where CFUN is NULL
     (IPA passes); return false for now.  FIXME: operands may be
     active in each individual function; maybe this function should
     take CFUN as a parameter.  */
  if (cfun == NULL)
    return false;

  return cfun->gimple_df && gimple_ssa_operands (cfun)->ops_active;
}


/* Create the VOP variable, an artificial global variable to act as a
   representative of the FUD chain of all the virtual operands.  */

static void
create_vop_var (void)
{
  tree global_var;

  gcc_assert (cfun->gimple_df->vop == NULL_TREE);

  global_var = build_decl (BUILTINS_LOCATION, VAR_DECL,
                           get_identifier (".MEM"),
                           void_type_node);
  DECL_ARTIFICIAL (global_var) = 1;
  TREE_READONLY (global_var) = 0;
  DECL_EXTERNAL (global_var) = 1;
  TREE_STATIC (global_var) = 1;
  TREE_USED (global_var) = 1;
  DECL_CONTEXT (global_var) = NULL_TREE;
  TREE_THIS_VOLATILE (global_var) = 0;
  TREE_ADDRESSABLE (global_var) = 0;

  create_var_ann (global_var);
  add_referenced_var (global_var);
  cfun->gimple_df->vop = global_var;
}

/* These are the sizes of the operand memory buffer in bytes which gets
   allocated each time more operand space is required.  The final value is
   the amount that is allocated every time after that.
   In 1k we can fit 25 use operands (or 63 def operands) on a host with
   8-byte pointers; that would be 10 statements each with 1 def and 2
   uses.  */

#define OP_SIZE_INIT    0
#define OP_SIZE_1       (1024 - sizeof (void *))
#define OP_SIZE_2       (1024 * 4 - sizeof (void *))
#define OP_SIZE_3       (1024 * 16 - sizeof (void *))

/* Initialize the operand cache routines.  */

void
init_ssa_operands (void)
{
  if (!n_initialized++)
    {
      build_defs = VEC_alloc (tree, heap, 5);
      build_uses = VEC_alloc (tree, heap, 10);
      build_vuse = NULL_TREE;
      build_vdef = NULL_TREE;
      bitmap_obstack_initialize (&operands_bitmap_obstack);
    }

  gcc_assert (gimple_ssa_operands (cfun)->operand_memory == NULL);
  gimple_ssa_operands (cfun)->operand_memory_index
     = gimple_ssa_operands (cfun)->ssa_operand_mem_size;
  gimple_ssa_operands (cfun)->ops_active = true;
  memset (&clobber_stats, 0, sizeof (clobber_stats));
  gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_INIT;
  create_vop_var ();
}


/* Dispose of anything required by the operand routines.  */

void
fini_ssa_operands (void)
{
  struct ssa_operand_memory_d *ptr;

  if (!--n_initialized)
    {
      VEC_free (tree, heap, build_defs);
      VEC_free (tree, heap, build_uses);
      build_vdef = NULL_TREE;
      build_vuse = NULL_TREE;
    }

  gimple_ssa_operands (cfun)->free_defs = NULL;
  gimple_ssa_operands (cfun)->free_uses = NULL;

  while ((ptr = gimple_ssa_operands (cfun)->operand_memory) != NULL)
    {
      gimple_ssa_operands (cfun)->operand_memory
        = gimple_ssa_operands (cfun)->operand_memory->next;
      ggc_free (ptr);
    }

  gimple_ssa_operands (cfun)->ops_active = false;

  if (!n_initialized)
    bitmap_obstack_release (&operands_bitmap_obstack);

  cfun->gimple_df->vop = NULL_TREE;

  if (dump_file && (dump_flags & TDF_STATS))
    {
      fprintf (dump_file, "Original clobbered vars:           %d\n",
               clobber_stats.clobbered_vars);
      fprintf (dump_file, "Static write clobbers avoided:     %d\n",
               clobber_stats.static_write_clobbers_avoided);
      fprintf (dump_file, "Static read clobbers avoided:      %d\n",
               clobber_stats.static_read_clobbers_avoided);
      fprintf (dump_file, "Unescapable clobbers avoided:      %d\n",
               clobber_stats.unescapable_clobbers_avoided);
      fprintf (dump_file, "Original read-only clobbers:       %d\n",
               clobber_stats.readonly_clobbers);
      fprintf (dump_file, "Static read-only clobbers avoided: %d\n",
               clobber_stats.static_readonly_clobbers_avoided);
    }
}
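
/* Illustrative lifecycle (a simplified sketch; in practice the SSA
   infrastructure makes these calls, not individual passes):

     init_ssa_operands ();           // once per function: set up caches
     ...
     gimple_set_modified (stmt, true);
     update_stmt_operands (stmt);    // rebuild a changed stmt's operands
     ...
     fini_ssa_operands ();           // release per-function operand memory
*/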


/* Return memory for an operand of size SIZE.  */

static inline void *
ssa_operand_alloc (unsigned size)
{
  char *ptr;

  gcc_assert (size == sizeof (struct use_optype_d)
              || size == sizeof (struct def_optype_d));

  if (gimple_ssa_operands (cfun)->operand_memory_index + size
      >= gimple_ssa_operands (cfun)->ssa_operand_mem_size)
    {
      struct ssa_operand_memory_d *ptr;

      switch (gimple_ssa_operands (cfun)->ssa_operand_mem_size)
        {
        case OP_SIZE_INIT:
          gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_1;
          break;
        case OP_SIZE_1:
          gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_2;
          break;
        case OP_SIZE_2:
        case OP_SIZE_3:
          gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_3;
          break;
        default:
          gcc_unreachable ();
        }

      ptr = ggc_alloc_ssa_operand_memory_d (sizeof (void *)
                        + gimple_ssa_operands (cfun)->ssa_operand_mem_size);

      ptr->next = gimple_ssa_operands (cfun)->operand_memory;
      gimple_ssa_operands (cfun)->operand_memory = ptr;
      gimple_ssa_operands (cfun)->operand_memory_index = 0;
    }

  ptr = &(gimple_ssa_operands (cfun)->operand_memory
          ->mem[gimple_ssa_operands (cfun)->operand_memory_index]);
  gimple_ssa_operands (cfun)->operand_memory_index += size;
  return ptr;
}
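
/* The growth schedule above (1K, then 4K, then 16K for every later
   chunk) is a generic chained bump allocator.  A minimal standalone
   sketch of the same technique, using plain malloc instead of GGC and
   hypothetical names throughout:

     #include <stdlib.h>

     struct chunk { struct chunk *next; char mem[]; };

     struct bump_alloc
     {
       struct chunk *chunks;   // chain of live chunks, newest first
       size_t index, size;     // bump offset and current chunk size
     };

     static void *
     bump_allocate (struct bump_alloc *a, size_t size)
     {
       if (a->index + size > a->size)
         {
           // Grow geometrically up to a cap, as OP_SIZE_1..3 do above.
           a->size = a->size ? (a->size < 16384 ? a->size * 4 : 16384)
                             : 1024;
           struct chunk *c = malloc (sizeof (struct chunk) + a->size);
           c->next = a->chunks;
           a->chunks = c;
           a->index = 0;
         }
       void *p = a->chunks->mem + a->index;
       a->index += size;
       return p;
     }

   Freeing everything at once is a walk over the chunk chain, exactly
   what fini_ssa_operands does with operand_memory.  */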


/* Allocate a DEF operand.  */

static inline struct def_optype_d *
alloc_def (void)
{
  struct def_optype_d *ret;
  if (gimple_ssa_operands (cfun)->free_defs)
    {
      ret = gimple_ssa_operands (cfun)->free_defs;
      gimple_ssa_operands (cfun)->free_defs
        = gimple_ssa_operands (cfun)->free_defs->next;
    }
  else
    ret = (struct def_optype_d *)
          ssa_operand_alloc (sizeof (struct def_optype_d));
  return ret;
}


/* Allocate a USE operand.  */

static inline struct use_optype_d *
alloc_use (void)
{
  struct use_optype_d *ret;
  if (gimple_ssa_operands (cfun)->free_uses)
    {
      ret = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses
        = gimple_ssa_operands (cfun)->free_uses->next;
    }
  else
    ret = (struct use_optype_d *)
          ssa_operand_alloc (sizeof (struct use_optype_d));
  return ret;
}
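
/* alloc_def and alloc_use above try a per-function free list first and
   fall back to ssa_operand_alloc only when it is empty; the finalize_*
   routines below push retired operand nodes back onto those lists.  A
   minimal standalone sketch of the free-list pattern (hypothetical
   names; 'arena' is a bump_alloc from the previous sketch):

     struct node { struct node *next; };

     static struct bump_alloc arena;
     static struct node *free_list;

     static struct node *
     node_alloc (void)
     {
       struct node *n = free_list;
       if (n)
         free_list = n->next;    // reuse a retired node
       else
         n = bump_allocate (&arena, sizeof (struct node));
       return n;
     }

     static void
     node_retire (struct node *n)
     {
       n->next = free_list;
       free_list = n;
     }
*/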


/* Adds OP to the list of defs after LAST.  */

static inline def_optype_p
add_def_op (tree *op, def_optype_p last)
{
  def_optype_p new_def;

  new_def = alloc_def ();
  DEF_OP_PTR (new_def) = op;
  last->next = new_def;
  new_def->next = NULL;
  return new_def;
}


/* Adds OP to the list of uses of statement STMT after LAST.  */

static inline use_optype_p
add_use_op (gimple stmt, tree *op, use_optype_p last)
{
  use_optype_p new_use;

  new_use = alloc_use ();
  USE_OP_PTR (new_use)->use = op;
  link_imm_use_stmt (USE_OP_PTR (new_use), *op, stmt);
  last->next = new_use;
  new_use->next = NULL;
  return new_use;
}



/* Takes elements from build_defs and turns them into def operands of STMT.
   TODO -- Make build_defs VEC of tree *.  */

static inline void
finalize_ssa_defs (gimple stmt)
{
  unsigned new_i;
  struct def_optype_d new_list;
  def_optype_p old_ops, last;
  unsigned int num = VEC_length (tree, build_defs);

  /* There should only be a single real definition per assignment.  */
  gcc_assert ((stmt && gimple_code (stmt) != GIMPLE_ASSIGN) || num <= 1);

  /* Prepend the VDEF we may have built.  */
  if (build_vdef != NULL_TREE)
    {
      tree oldvdef = gimple_vdef (stmt);
      if (oldvdef
          && TREE_CODE (oldvdef) == SSA_NAME)
        oldvdef = SSA_NAME_VAR (oldvdef);
      if (oldvdef != build_vdef)
        gimple_set_vdef (stmt, build_vdef);
      VEC_safe_insert (tree, heap, build_defs, 0, (tree)gimple_vdef_ptr (stmt));
      ++num;
    }

  new_list.next = NULL;
  last = &new_list;

  old_ops = gimple_def_ops (stmt);

  new_i = 0;

  /* Clear and unlink a no longer necessary VDEF.  */
  if (build_vdef == NULL_TREE
      && gimple_vdef (stmt) != NULL_TREE)
    {
      if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
        {
          unlink_stmt_vdef (stmt);
          release_ssa_name (gimple_vdef (stmt));
        }
      gimple_set_vdef (stmt, NULL_TREE);
    }

  /* If we have a non-SSA_NAME VDEF, mark it for renaming.  */
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) != SSA_NAME)
    mark_sym_for_renaming (gimple_vdef (stmt));

  /* Check for the common case of 1 def that hasn't changed.  */
  if (old_ops && old_ops->next == NULL && num == 1
      && (tree *) VEC_index (tree, build_defs, 0) == DEF_OP_PTR (old_ops))
    return;

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      old_ops->next = gimple_ssa_operands (cfun)->free_defs;
      gimple_ssa_operands (cfun)->free_defs = old_ops;
    }

  /* If there is anything remaining in the build_defs list, simply emit it.  */
  for ( ; new_i < num; new_i++)
    last = add_def_op ((tree *) VEC_index (tree, build_defs, new_i), last);

  /* Now set the stmt's operands.  */
  gimple_set_def_ops (stmt, new_list.next);
}


/* Takes elements from build_uses and turns them into use operands of STMT.
   TODO -- Make build_uses VEC of tree *.  */

static inline void
finalize_ssa_uses (gimple stmt)
{
  unsigned new_i;
  struct use_optype_d new_list;
  use_optype_p old_ops, ptr, last;

  /* Prepend the VUSE we may have built.  */
  if (build_vuse != NULL_TREE)
    {
      tree oldvuse = gimple_vuse (stmt);
      if (oldvuse
          && TREE_CODE (oldvuse) == SSA_NAME)
        oldvuse = SSA_NAME_VAR (oldvuse);
      if (oldvuse != (build_vuse != NULL_TREE
                      ? build_vuse : build_vdef))
        gimple_set_vuse (stmt, NULL_TREE);
      VEC_safe_insert (tree, heap, build_uses, 0, (tree)gimple_vuse_ptr (stmt));
    }

  new_list.next = NULL;
  last = &new_list;

  old_ops = gimple_use_ops (stmt);

  /* Clear a no longer necessary VUSE.  */
  if (build_vuse == NULL_TREE
      && gimple_vuse (stmt) != NULL_TREE)
    gimple_set_vuse (stmt, NULL_TREE);

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr; ptr = ptr->next)
        delink_imm_use (USE_OP_PTR (ptr));
      old_ops->next = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses = old_ops;
    }

  /* If we added a VUSE, make sure to set the operand if it is not already
     present and mark it for renaming.  */
  if (build_vuse != NULL_TREE
      && gimple_vuse (stmt) == NULL_TREE)
    {
      gimple_set_vuse (stmt, gimple_vop (cfun));
      mark_sym_for_renaming (gimple_vop (cfun));
    }

  /* Now create nodes for all the new uses.  */
  for (new_i = 0; new_i < VEC_length (tree, build_uses); new_i++)
    last = add_use_op (stmt,
                       (tree *) VEC_index (tree, build_uses, new_i),
                       last);

  /* Now set the stmt's operands.  */
  gimple_set_use_ops (stmt, new_list.next);
}


/* Clear the built VDEF/VUSE and empty the build arrays for defs and
   uses.  */

static inline void
cleanup_build_arrays (void)
{
  build_vdef = NULL_TREE;
  build_vuse = NULL_TREE;
  VEC_truncate (tree, build_defs, 0);
  VEC_truncate (tree, build_uses, 0);
}


/* Finalize all the build vectors, filling the new operands into STMT.  */

static inline void
finalize_ssa_stmt_operands (gimple stmt)
{
  finalize_ssa_defs (stmt);
  finalize_ssa_uses (stmt);
  cleanup_build_arrays ();
}


/* Start the process of building up operand vectors.  */

static inline void
start_ssa_stmt_operands (void)
{
  gcc_assert (VEC_length (tree, build_defs) == 0);
  gcc_assert (VEC_length (tree, build_uses) == 0);
  gcc_assert (build_vuse == NULL_TREE);
  gcc_assert (build_vdef == NULL_TREE);
}


/* Add DEF_P to the list of pointers to operands.  */

static inline void
append_def (tree *def_p)
{
  VEC_safe_push (tree, heap, build_defs, (tree) def_p);
}


/* Add USE_P to the list of pointers to operands.  */

static inline void
append_use (tree *use_p)
{
  VEC_safe_push (tree, heap, build_uses, (tree) use_p);
}


/* Add VAR to the set of variables that require a VDEF operator.  */

static inline void
append_vdef (tree var)
{
  if (!optimize)
    return;

  gcc_assert ((build_vdef == NULL_TREE
               || build_vdef == var)
              && (build_vuse == NULL_TREE
                  || build_vuse == var));

  build_vdef = var;
  build_vuse = var;
}


/* Add VAR to the set of variables that require a VUSE operator.  */

static inline void
append_vuse (tree var)
{
  if (!optimize)
    return;

  gcc_assert (build_vuse == NULL_TREE
              || build_vuse == var);

  build_vuse = var;
}

/* Add virtual operands for STMT.  FLAGS is as in get_expr_operands.  */

static void
add_virtual_operand (gimple stmt ATTRIBUTE_UNUSED, int flags)
{
  /* Add virtual operands to the stmt, unless the caller has specifically
     requested not to do that (used when adding operands inside an
     ADDR_EXPR expression).  */
  if (flags & opf_no_vops)
    return;

  gcc_assert (!is_gimple_debug (stmt));

  if (flags & opf_def)
    append_vdef (gimple_vop (cfun));
  else
    append_vuse (gimple_vop (cfun));
}


/* Add *VAR_P to the appropriate operand array for statement STMT.
   FLAGS is as in get_expr_operands.  If *VAR_P is a GIMPLE register,
   it will be added to the statement's real operands; otherwise it is
   added to virtual operands.  */

static void
add_stmt_operand (tree *var_p, gimple stmt, int flags)
{
  tree var, sym;

  gcc_assert (SSA_VAR_P (*var_p));

  var = *var_p;
  sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);

  /* Mark statements with volatile operands.  */
  if (!(flags & opf_no_vops)
      && TREE_THIS_VOLATILE (sym))
    gimple_set_has_volatile_ops (stmt, true);

  if (is_gimple_reg (sym))
    {
      /* The variable is a GIMPLE register.  Add it to real operands.  */
      if (flags & opf_def)
        append_def (var_p);
      else
        append_use (var_p);
    }
  else
    add_virtual_operand (stmt, flags);
}
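
/* For example (illustrative GIMPLE, in the usual dump notation): in

     # VUSE <.MEM_4>
     x_1 = *p_2;

   x_1 is a real DEF and p_2 a real USE, both pointers into the
   statement, while the load itself is represented by a VUSE of the
   single .MEM variable.  A store would instead carry a virtual
   definition such as "# .MEM_5 = VDEF <.MEM_4>".  */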

/* Mark the base address of REF as having its address taken.
   REF may be a single variable whose address has been taken or any
   other valid GIMPLE memory reference (structure reference, array,
   etc).  */

static void
mark_address_taken (tree ref)
{
  tree var;

  /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
     as the only thing we take the address of.  If VAR is a structure,
     taking the address of a field means that the whole structure may
     be referenced using pointer arithmetic.  See PR 21407 and the
     ensuing mailing list discussion.  */
  var = get_base_address (ref);
  if (var)
    {
      if (DECL_P (var))
        TREE_ADDRESSABLE (var) = 1;
      else if (TREE_CODE (var) == MEM_REF
               && TREE_CODE (TREE_OPERAND (var, 0)) == ADDR_EXPR
               && DECL_P (TREE_OPERAND (TREE_OPERAND (var, 0), 0)))
        TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (var, 0), 0)) = 1;
    }
}
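
/* For instance, given user code like

     p = &s.f;

   the whole of 's', not just the field 'f', is marked TREE_ADDRESSABLE,
   since pointer arithmetic on 'p' may reach any part of 's'.  */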


/* A subroutine of get_expr_operands to handle MEM_REF.

   STMT is the statement being processed, EXPR is the MEM_REF
      that got us here.

   FLAGS is as in get_expr_operands.

   RECURSE_ON_BASE should be set to true if we want to continue
      calling get_expr_operands on the base pointer, and false if
      something else will do it for us.  */

static void
get_indirect_ref_operands (gimple stmt, tree expr, int flags,
                           bool recurse_on_base)
{
  tree *pptr = &TREE_OPERAND (expr, 0);

  if (!(flags & opf_no_vops)
      && TREE_THIS_VOLATILE (expr))
    gimple_set_has_volatile_ops (stmt, true);

  /* Add the VOP.  */
  add_virtual_operand (stmt, flags);

  /* If requested, add a USE operand for the base pointer.  */
  if (recurse_on_base)
    get_expr_operands (stmt, pptr,
                       opf_non_addressable | opf_use
                       | (flags & (opf_no_vops|opf_not_non_addressable)));
}


/* A subroutine of get_expr_operands to handle TARGET_MEM_REF.  */

static void
get_tmr_operands (gimple stmt, tree expr, int flags)
{
  if (!(flags & opf_no_vops)
      && TREE_THIS_VOLATILE (expr))
    gimple_set_has_volatile_ops (stmt, true);

  /* First record the real operands.  */
  get_expr_operands (stmt, &TMR_BASE (expr), opf_use | (flags & opf_no_vops));
  get_expr_operands (stmt, &TMR_INDEX (expr), opf_use | (flags & opf_no_vops));
  get_expr_operands (stmt, &TMR_INDEX2 (expr), opf_use | (flags & opf_no_vops));

  add_virtual_operand (stmt, flags);
}


/* If STMT is a call that may clobber globals and other symbols that
   escape, add them to the VDEF/VUSE lists for it.  */

static void
maybe_add_call_vops (gimple stmt)
{
  int call_flags = gimple_call_flags (stmt);

  /* If aliases have been computed already, add VDEF or VUSE
     operands for all the symbols that have been found to be
     call-clobbered.  */
  if (!(call_flags & ECF_NOVOPS))
    {
      /* A 'pure' or a 'const' function never call-clobbers anything.
         A 'noreturn' function might, but since we don't return anyway
         there is no point in recording that.  */
      if (!(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
        add_virtual_operand (stmt, opf_def);
      else if (!(call_flags & ECF_CONST))
        add_virtual_operand (stmt, opf_use);
    }
}
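
/* Concretely (illustrative GIMPLE; write_log is a hypothetical
   function): an ordinary call gets a VDEF (which implies a VUSE), a
   pure or noreturn call only a VUSE, and a const call no virtual
   operands at all:

     # .MEM_2 = VDEF <.MEM_1>
     write_log ();                  // may read and write memory

     # VUSE <.MEM_1>
     t_3 = strlen (s_4);            // pure: reads memory, never writes

     u_5 = __builtin_abs (v_6);     // const: touches no memory
*/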


/* Scan operands in the GIMPLE_ASM statement STMT.  */

static void
get_asm_expr_operands (gimple stmt)
{
  size_t i, noutputs;
  const char **oconstraints;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  noutputs = gimple_asm_noutputs (stmt);
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  /* Gather all output operands.  */
  for (i = 0; i < gimple_asm_noutputs (stmt); i++)
    {
      tree link = gimple_asm_output_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
                               &allows_reg, &is_inout);

      /* This should have been split in gimplify_asm_expr.  */
      gcc_assert (!allows_reg || !is_inout);

      /* Memory operands are addressable.  Note that STMT needs the
         address of this operand.  */
      if (!allows_reg && allows_mem)
        mark_address_taken (TREE_VALUE (link));

      get_expr_operands (stmt, &TREE_VALUE (link), opf_def | opf_not_non_addressable);
    }

  /* Gather all input operands.  */
  for (i = 0; i < gimple_asm_ninputs (stmt); i++)
    {
      tree link = gimple_asm_input_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
                              &allows_mem, &allows_reg);

      /* Memory operands are addressable.  Note that STMT needs the
         address of this operand.  */
      if (!allows_reg && allows_mem)
        mark_address_taken (TREE_VALUE (link));

      get_expr_operands (stmt, &TREE_VALUE (link), opf_not_non_addressable);
    }

  /* Clobber all memory and addressable symbols for asm ("" : : : "memory");  */
  if (gimple_asm_clobbers_memory_p (stmt))
    add_virtual_operand (stmt, opf_def);
}
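
/* For example (illustrative): a full compiler barrier such as

     __asm__ __volatile__ ("" : : : "memory");

   clobbers all memory, so the statement receives a virtual definition
   of .MEM; an asm whose constraint only allows a memory operand ("=m"
   or "m") additionally has that operand's base address marked as
   taken by the loops above.  */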


/* Recursively scan the expression pointed to by EXPR_P in statement
   STMT.  FLAGS is one of the OPF_* constants modifying how to
   interpret the operands found.  */

static void
get_expr_operands (gimple stmt, tree *expr_p, int flags)
{
  enum tree_code code;
  enum tree_code_class codeclass;
  tree expr = *expr_p;
  int uflags = opf_use;

  if (expr == NULL)
    return;

  if (is_gimple_debug (stmt))
    uflags |= (flags & opf_no_vops);

  code = TREE_CODE (expr);
  codeclass = TREE_CODE_CLASS (code);

  switch (code)
    {
    case ADDR_EXPR:
      /* Taking the address of a variable does not represent a
         reference to it, but the fact that the statement takes its
         address will be of interest to some passes (e.g. alias
         resolution).  */
      if ((!(flags & opf_non_addressable)
           || (flags & opf_not_non_addressable))
          && !is_gimple_debug (stmt))
        mark_address_taken (TREE_OPERAND (expr, 0));

      /* If the address is invariant, there may be no interesting
         variable references inside.  */
      if (is_gimple_min_invariant (expr))
        return;

      /* Otherwise, there may be variables referenced inside but there
         should be no VUSEs created, since the referenced objects are
         not really accessed.  The only operands that we should find
         here are ARRAY_REF indices which will always be real operands
         (GIMPLE does not allow non-registers as array indices).  */
      flags |= opf_no_vops;
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
                         flags | opf_not_non_addressable);
      return;

    case SSA_NAME:
      add_stmt_operand (expr_p, stmt, flags);
      return;

    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      add_stmt_operand (expr_p, stmt, flags);
      return;

    case DEBUG_EXPR_DECL:
      gcc_assert (gimple_debug_bind_p (stmt));
      return;

    case MEM_REF:
      get_indirect_ref_operands (stmt, expr, flags, true);
      return;

    case TARGET_MEM_REF:
      get_tmr_operands (stmt, expr, flags);
      return;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
        if (!(flags & opf_no_vops)
            && TREE_THIS_VOLATILE (expr))
          gimple_set_has_volatile_ops (stmt, true);

        get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);

        if (code == COMPONENT_REF)
          {
            if (!(flags & opf_no_vops)
                && TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
              gimple_set_has_volatile_ops (stmt, true);
            get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags);
          }
        else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
          {
            get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags);
            get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags);
            get_expr_operands (stmt, &TREE_OPERAND (expr, 3), uflags);
          }

        return;
      }

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
         and an rvalue reference to its second argument.  */
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case COND_EXPR:
    case VEC_COND_EXPR:
    case VEC_PERM_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), uflags);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags);
      return;

    case CONSTRUCTOR:
      {
        /* General aggregate CONSTRUCTORs have been decomposed, but they
           are still in use as the COMPLEX_EXPR equivalent for vectors.  */
        constructor_elt *ce;
        unsigned HOST_WIDE_INT idx;

        /* A volatile constructor is actually TREE_CLOBBER_P; transfer
           the volatility to the statement.  We don't test TREE_CLOBBER_P
           here, mirroring the other uses of TREE_THIS_VOLATILE in this
           file.  */
        if (!(flags & opf_no_vops)
            && TREE_THIS_VOLATILE (expr))
          gimple_set_has_volatile_ops (stmt, true);

        for (idx = 0;
             VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
             idx++)
          get_expr_operands (stmt, &ce->value, uflags);

        return;
      }

    case BIT_FIELD_REF:
      if (!(flags & opf_no_vops)
          && TREE_THIS_VOLATILE (expr))
        gimple_set_has_volatile_ops (stmt, true);
      /* FALLTHRU */

    case VIEW_CONVERT_EXPR:
    do_unary:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case COMPOUND_EXPR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    do_binary:
      {
        get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
        get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
        return;
      }

    case DOT_PROD_EXPR:
    case REALIGN_LOAD_EXPR:
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
    case FMA_EXPR:
      {
        get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
        get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
        get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
        return;
      }

    case FUNCTION_DECL:
    case LABEL_DECL:
    case CONST_DECL:
    case CASE_LABEL_EXPR:
      /* Expressions that make no memory references.  */
      return;

    default:
      if (codeclass == tcc_unary)
        goto do_unary;
      if (codeclass == tcc_binary || codeclass == tcc_comparison)
        goto do_binary;
      if (codeclass == tcc_constant || codeclass == tcc_type)
        return;
    }

  /* If we get here, something has gone wrong.  */
#ifdef ENABLE_CHECKING
  fprintf (stderr, "unhandled expression in get_expr_operands():\n");
  debug_tree (expr);
  fputs ("\n", stderr);
#endif
  gcc_unreachable ();
}


/* Parse STMT looking for operands.  When finished, the various
   build_* operand vectors will have potential operands in them.  */

static void
parse_ssa_operands (gimple stmt)
{
  enum gimple_code code = gimple_code (stmt);
  size_t i, n, start = 0;

  switch (code)
    {
    case GIMPLE_ASM:
      get_asm_expr_operands (stmt);
      break;

    case GIMPLE_TRANSACTION:
      /* The start of a transaction is a memory barrier.  */
      add_virtual_operand (stmt, opf_def | opf_use);
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt)
          && gimple_debug_bind_has_value_p (stmt))
        get_expr_operands (stmt, gimple_debug_bind_get_value_ptr (stmt),
                           opf_use | opf_no_vops);
      break;

    case GIMPLE_RETURN:
      append_vuse (gimple_vop (cfun));
      goto do_default;

    case GIMPLE_CALL:
      /* Add call-clobbered operands, if needed.  */
      maybe_add_call_vops (stmt);
      /* FALLTHRU */

    case GIMPLE_ASSIGN:
      get_expr_operands (stmt, gimple_op_ptr (stmt, 0), opf_def);
      start = 1;
      /* FALLTHRU */

    default:
    do_default:
      n = gimple_num_ops (stmt);
      for (i = start; i < n; i++)
        get_expr_operands (stmt, gimple_op_ptr (stmt, i), opf_use);
      break;
    }
}


/* Create an operands cache for STMT.  */

static void
build_ssa_operands (gimple stmt)
{
  /* Initially assume that the statement has no volatile operands.  */
  gimple_set_has_volatile_ops (stmt, false);

  start_ssa_stmt_operands ();
  parse_ssa_operands (stmt);
  finalize_ssa_stmt_operands (stmt);
}

/* Verifies SSA statement operands.  */

DEBUG_FUNCTION bool
verify_ssa_operands (gimple stmt)
{
  use_operand_p use_p;
  def_operand_p def_p;
  ssa_op_iter iter;
  unsigned i;
  tree use, def;
  bool volatile_p = gimple_has_volatile_ops (stmt);

  /* build_ssa_operands w/o finalizing them.  */
  gimple_set_has_volatile_ops (stmt, false);
  start_ssa_stmt_operands ();
  parse_ssa_operands (stmt);

  /* Now verify the built operands are the same as present in STMT.  */
  def = gimple_vdef (stmt);
  if (def
      && TREE_CODE (def) == SSA_NAME)
    def = SSA_NAME_VAR (def);
  if (build_vdef != def)
    {
      error ("virtual definition of statement not up-to-date");
      return true;
    }
  if (gimple_vdef (stmt)
      && ((def_p = gimple_vdef_op (stmt)) == NULL_DEF_OPERAND_P
          || DEF_FROM_PTR (def_p) != gimple_vdef (stmt)))
    {
      error ("virtual def operand missing for stmt");
      return true;
    }

  use = gimple_vuse (stmt);
  if (use
      && TREE_CODE (use) == SSA_NAME)
    use = SSA_NAME_VAR (use);
  if (build_vuse != use)
    {
      error ("virtual use of statement not up-to-date");
      return true;
    }
  if (gimple_vuse (stmt)
      && ((use_p = gimple_vuse_op (stmt)) == NULL_USE_OPERAND_P
          || USE_FROM_PTR (use_p) != gimple_vuse (stmt)))
    {
      error ("virtual use operand missing for stmt");
      return true;
    }

  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
    {
      FOR_EACH_VEC_ELT (tree, build_uses, i, use)
        {
          if (use_p->use == (tree *)use)
            {
              VEC_replace (tree, build_uses, i, NULL_TREE);
              break;
            }
        }
      if (i == VEC_length (tree, build_uses))
        {
          error ("excess use operand for stmt");
          debug_generic_expr (USE_FROM_PTR (use_p));
          return true;
        }
    }
  FOR_EACH_VEC_ELT (tree, build_uses, i, use)
    if (use != NULL_TREE)
      {
        error ("use operand missing for stmt");
        debug_generic_expr (*(tree *)use);
        return true;
      }

  FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, iter, SSA_OP_DEF)
    {
      FOR_EACH_VEC_ELT (tree, build_defs, i, def)
        {
          if (def_p == (tree *)def)
            {
              VEC_replace (tree, build_defs, i, NULL_TREE);
              break;
            }
        }
      if (i == VEC_length (tree, build_defs))
        {
          error ("excess def operand for stmt");
          debug_generic_expr (DEF_FROM_PTR (def_p));
          return true;
        }
    }
  FOR_EACH_VEC_ELT (tree, build_defs, i, def)
    if (def != NULL_TREE)
      {
        error ("def operand missing for stmt");
        debug_generic_expr (*(tree *)def);
        return true;
      }

  if (gimple_has_volatile_ops (stmt) != volatile_p)
    {
      error ("stmt volatile flag not up-to-date");
      return true;
    }

  cleanup_build_arrays ();
  return false;
}


/* Releases the operands of STMT back to their freelists, and clears
   the stmt operand lists.  */

void
free_stmt_operands (gimple stmt)
{
  def_optype_p defs = gimple_def_ops (stmt), last_def;
  use_optype_p uses = gimple_use_ops (stmt), last_use;

  if (defs)
    {
      for (last_def = defs; last_def->next; last_def = last_def->next)
        continue;
      last_def->next = gimple_ssa_operands (cfun)->free_defs;
      gimple_ssa_operands (cfun)->free_defs = defs;
      gimple_set_def_ops (stmt, NULL);
    }

  if (uses)
    {
      for (last_use = uses; last_use->next; last_use = last_use->next)
        delink_imm_use (USE_OP_PTR (last_use));
      delink_imm_use (USE_OP_PTR (last_use));
      last_use->next = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses = uses;
      gimple_set_use_ops (stmt, NULL);
    }

  if (gimple_has_mem_ops (stmt))
    {
      gimple_set_vuse (stmt, NULL_TREE);
      gimple_set_vdef (stmt, NULL_TREE);
    }
}


/* Get the operands of statement STMT.  */

void
update_stmt_operands (gimple stmt)
{
  /* If update_stmt_operands is called before SSA is initialized, do
     nothing.  */
  if (!ssa_operands_active ())
    return;

  timevar_push (TV_TREE_OPS);

  /* If the stmt is a noreturn call, queue it to be processed by
     split_bbs_on_noreturn_calls during cfg cleanup.  */
  if (is_gimple_call (stmt)
      && gimple_call_noreturn_p (stmt))
    VEC_safe_push (gimple, gc, MODIFIED_NORETURN_CALLS (cfun), stmt);

  gcc_assert (gimple_modified_p (stmt));
  build_ssa_operands (stmt);
  gimple_set_modified (stmt, false);

  timevar_pop (TV_TREE_OPS);
}


/* Swap operands EXP0 and EXP1 in statement STMT.  No attempt is made
   to test the validity of the swap operation.  */

void
swap_tree_operands (gimple stmt, tree *exp0, tree *exp1)
{
  tree op0, op1;
  op0 = *exp0;
  op1 = *exp1;

  /* If the operand cache is active, attempt to preserve the relative
     positions of these two operands in their respective immediate use
     lists by adjusting their use pointer to point to the new
     operand position.  */
  if (ssa_operands_active () && op0 != op1)
    {
      use_optype_p use0, use1, ptr;
      use0 = use1 = NULL;

      /* Find the 2 operands in the cache, if they are there.  */
      for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next)
        if (USE_OP_PTR (ptr)->use == exp0)
          {
            use0 = ptr;
            break;
          }

      for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next)
        if (USE_OP_PTR (ptr)->use == exp1)
          {
            use1 = ptr;
            break;
          }

      /* And adjust their location to point to the new position of the
         operand.  */
      if (use0)
        USE_OP_PTR (use0)->use = exp1;
      if (use1)
        USE_OP_PTR (use1)->use = exp0;
    }

  /* Now swap the data.  */
  *exp0 = op1;
  *exp1 = op0;
}
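
/* A typical call site (illustrative; assumes STMT is an assignment
   with two right-hand-side operands, e.g. when canonicalizing a
   commutative operation):

     swap_tree_operands (stmt,
                         gimple_assign_rhs1_ptr (stmt),
                         gimple_assign_rhs2_ptr (stmt));

   Re-aiming the cached use pointers first keeps each immediate-use
   link pointing at the slot that still holds its SSA name after the
   swap.  */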


/* Scan the immediate_use list for VAR making sure it's linked properly.
   Return TRUE if there is a problem and emit an error message to F.  */

DEBUG_FUNCTION bool
verify_imm_links (FILE *f, tree var)
{
  use_operand_p ptr, prev, list;
  int count;

  gcc_assert (TREE_CODE (var) == SSA_NAME);

  list = &(SSA_NAME_IMM_USE_NODE (var));
  gcc_assert (list->use == NULL);

  if (list->prev == NULL)
    {
      gcc_assert (list->next == NULL);
      return false;
    }

  prev = list;
  count = 0;
  for (ptr = list->next; ptr != list; )
    {
      if (prev != ptr->prev)
        goto error;

      if (ptr->use == NULL)
        goto error; /* 2 roots, or SAFE guard node.  */
      else if (*(ptr->use) != var)
        goto error;

      prev = ptr;
      ptr = ptr->next;

      /* Avoid infinite loops.  50,000,000 uses probably indicates a
         problem.  */
      if (count++ > 50000000)
        goto error;
    }

  /* Verify list in the other direction.  */
  prev = list;
  for (ptr = list->prev; ptr != list; )
    {
      if (prev != ptr->next)
        goto error;
      prev = ptr;
      ptr = ptr->prev;
      if (count-- < 0)
        goto error;
    }

  if (count != 0)
    goto error;

  return false;

 error:
  if (ptr->loc.stmt && gimple_modified_p (ptr->loc.stmt))
    {
      fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->loc.stmt);
      print_gimple_stmt (f, ptr->loc.stmt, 0, TDF_SLIM);
    }
  fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
           (void *)ptr->use);
  print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
  fprintf (f, "\n");
  return true;
}
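
/* The immediate-use list is a circular doubly linked list whose
   sentinel node lives in the SSA_NAME itself; a NULL 'use' field marks
   the sentinel.  A standalone sketch of the structure and of the
   two-direction verification above (hypothetical names, not GCC code):

     struct node { struct node *prev, *next; void *payload; };

     // An empty list is the sentinel pointing at itself.
     static void
     list_init (struct node *sentinel)
     {
       sentinel->prev = sentinel->next = sentinel;
       sentinel->payload = NULL;    // NULL payload marks the sentinel
     }

     static int
     list_ok (struct node *sentinel)
     {
       struct node *prev = sentinel, *p;
       long n = 0;
       for (p = sentinel->next; p != sentinel; prev = p, p = p->next)
         if (p->prev != prev        // broken back link
             || p->payload == NULL  // a second sentinel in the ring
             || n++ > 50000000)     // almost certainly a cycle
           return 0;
       prev = sentinel;
       for (p = sentinel->prev; p != sentinel; prev = p, p = p->prev)
         if (p->next != prev || n-- < 0)
           return 0;
       return n == 0;               // both walks saw the same nodes
     }
*/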


/* Dump all the immediate uses of VAR to FILE.  */

void
dump_immediate_uses_for (FILE *file, tree var)
{
  imm_use_iterator iter;
  use_operand_p use_p;

  gcc_assert (var && TREE_CODE (var) == SSA_NAME);

  print_generic_expr (file, var, TDF_SLIM);
  fprintf (file, " : -->");
  if (has_zero_uses (var))
    fprintf (file, " no uses.\n");
  else
    if (has_single_use (var))
      fprintf (file, " single use.\n");
    else
      fprintf (file, "%d uses.\n", num_imm_uses (var));

  FOR_EACH_IMM_USE_FAST (use_p, iter, var)
    {
      if (use_p->loc.stmt == NULL && use_p->use == NULL)
        fprintf (file, "***end of stmt iterator marker***\n");
      else
        if (!is_gimple_reg (USE_FROM_PTR (use_p)))
          print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_VOPS|TDF_MEMSYMS);
        else
          print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_SLIM);
    }
  fprintf (file, "\n");
}


/* Dump the immediate uses of every SSA name to FILE.  */

void
dump_immediate_uses (FILE *file)
{
  tree var;
  unsigned int x;

  fprintf (file, "Immediate_uses: \n\n");
  for (x = 1; x < num_ssa_names; x++)
    {
      var = ssa_name (x);
      if (!var)
        continue;
      dump_immediate_uses_for (file, var);
    }
}


/* Dump def-use edges on stderr.  */

DEBUG_FUNCTION void
debug_immediate_uses (void)
{
  dump_immediate_uses (stderr);
}


/* Dump def-use edges for VAR on stderr.  */

DEBUG_FUNCTION void
debug_immediate_uses_for (tree var)
{
  dump_immediate_uses_for (stderr, var);
}


/* Unlink STMT's virtual definition from the IL by propagating its use.  */

void
unlink_stmt_vdef (gimple stmt)
{
  use_operand_p use_p;
  imm_use_iterator iter;
  gimple use_stmt;
  tree vdef = gimple_vdef (stmt);

  if (!vdef
      || TREE_CODE (vdef) != SSA_NAME)
    return;

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, gimple_vdef (stmt))
    {
      FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
        SET_USE (use_p, gimple_vuse (stmt));
    }

  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_vdef (stmt)))
    SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_vuse (stmt)) = 1;
}
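
/* For example (illustrative GIMPLE): when a dead-store pass deletes

     # .MEM_3 = VDEF <.MEM_1>
     *p_2 = 1;

   calling unlink_stmt_vdef first rewrites every later use of .MEM_3,
   e.g. "# VUSE <.MEM_3>", to refer to .MEM_1, so the virtual use-def
   chain remains intact without the deleted definition.  */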