OpenCores
URL https://opencores.org/ocsvn/altor32/altor32/trunk

Subversion Repositories altor32

[/] [altor32/] [trunk/] [gcc-x64/] [or1knd-elf/] [lib/] [gcc/] [or1knd-elf/] [4.8.0/] [plugin/] [include/] [tree-flow-inline.h] - Blame information for rev 35

Details | Compare with Previous | View Log

Line No. Rev Author Line
1 35 ultra_embe
/* Inline functions for tree-flow.h
2
   Copyright (C) 2001, 2003, 2005, 2006, 2007, 2008, 2010
3
   Free Software Foundation, Inc.
4
   Contributed by Diego Novillo <dnovillo@redhat.com>
5
 
6
This file is part of GCC.
7
 
8
GCC is free software; you can redistribute it and/or modify
9
it under the terms of the GNU General Public License as published by
10
the Free Software Foundation; either version 3, or (at your option)
11
any later version.
12
 
13
GCC is distributed in the hope that it will be useful,
14
but WITHOUT ANY WARRANTY; without even the implied warranty of
15
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
16
GNU General Public License for more details.
17
 
18
You should have received a copy of the GNU General Public License
19
along with GCC; see the file COPYING3.  If not see
20
<http://www.gnu.org/licenses/>.  */
21
 
22
#ifndef _TREE_FLOW_INLINE_H
23
#define _TREE_FLOW_INLINE_H 1
24
 
25
/* Inline functions for manipulating various data structures defined in
26
   tree-flow.h.  See tree-flow.h for documentation.  */
27
 
28
/* Return true when gimple SSA form was built.
29
   gimple_in_ssa_p is queried by gimplifier in various early stages before SSA
30
   infrastructure is initialized.  Check for presence of the datastructures
31
   at first place.  */
32
static inline bool
33
gimple_in_ssa_p (const struct function *fun)
34
{
35
  return fun && fun->gimple_df && fun->gimple_df->in_ssa_p;
36
}
37
 
38
/* Artificial variable used for the virtual operand FUD chain.  */
39
static inline tree
40
gimple_vop (const struct function *fun)
41
{
42
  gcc_checking_assert (fun && fun->gimple_df);
43
  return fun->gimple_df->vop;
44
}
45
 
46
/* Initialize the hashtable iterator HTI to point to hashtable TABLE.
   Return the first live (non-empty, non-deleted) element, or NULL if the
   table contains no live elements.  */

static inline void *
first_htab_element (htab_iterator *hti, htab_t table)
{
  hti->htab = table;
  hti->slot = table->entries;
  hti->limit = hti->slot + htab_size (table);
  /* Scan forward to the first live slot.  Note the do/while reads the
     first slot before any bound check — assumes htab_size (table) >= 1,
     which holds for libiberty hash tables.  NOTE(review): confirm no
     zero-sized tables reach here.  */
  do
    {
      PTR x = *(hti->slot);
      if (x != HTAB_EMPTY_ENTRY && x != HTAB_DELETED_ENTRY)
        break;
    } while (++(hti->slot) < hti->limit);

  /* If the scan stopped inside the table we found a live element;
     otherwise the table held nothing but empty/deleted slots.  */
  if (hti->slot < hti->limit)
    return *(hti->slot);
  return NULL;
}
65
 
66
/* Return current non-empty/deleted slot of the hashtable pointed to by HTI,
67
   or NULL if we have  reached the end.  */
68
 
69
static inline bool
70
end_htab_p (const htab_iterator *hti)
71
{
72
  if (hti->slot >= hti->limit)
73
    return true;
74
  return false;
75
}
76
 
77
/* Advance the hashtable iterator pointed to by HTI to the next element of the
78
   hashtable.  */
79
 
80
static inline void *
81
next_htab_element (htab_iterator *hti)
82
{
83
  while (++(hti->slot) < hti->limit)
84
    {
85
      PTR x = *(hti->slot);
86
      if (x != HTAB_EMPTY_ENTRY && x != HTAB_DELETED_ENTRY)
87
        return x;
88
    };
89
  return NULL;
90
}
91
 
92
/* Get the number of the next statement uid to be allocated.  */
93
static inline unsigned int
94
gimple_stmt_max_uid (struct function *fn)
95
{
96
  return fn->last_stmt_uid;
97
}
98
 
99
/* Set the number of the next statement uid to be allocated.  */
100
static inline void
101
set_gimple_stmt_max_uid (struct function *fn, unsigned int maxid)
102
{
103
  fn->last_stmt_uid = maxid;
104
}
105
 
106
/* Allocate the next statement uid in FN: return the current counter
   value and post-increment the counter.  (Original comment incorrectly
   said "Set"; this is the allocator, paired with gimple_stmt_max_uid /
   set_gimple_stmt_max_uid above.)  */
static inline unsigned int
inc_gimple_stmt_max_uid (struct function *fn)
{
  return fn->last_stmt_uid++;
}
112
 
113
/* Return the line number for EXPR, or return -1 if we have no line
114
   number information for it.  */
115
static inline int
116
get_lineno (const_gimple stmt)
117
{
118
  location_t loc;
119
 
120
  if (!stmt)
121
    return -1;
122
 
123
  loc = gimple_location (stmt);
124
  if (loc == UNKNOWN_LOCATION)
125
    return -1;
126
 
127
  return LOCATION_LINE (loc);
128
}
129
 
130
/* Delink an immediate_uses node from its chain.  */
131
static inline void
132
delink_imm_use (ssa_use_operand_t *linknode)
133
{
134
  /* Return if this node is not in a list.  */
135
  if (linknode->prev == NULL)
136
    return;
137
 
138
  linknode->prev->next = linknode->next;
139
  linknode->next->prev = linknode->prev;
140
  linknode->prev = NULL;
141
  linknode->next = NULL;
142
}
143
 
144
/* Link ssa_imm_use node LINKNODE into the chain for LIST.  */
145
static inline void
146
link_imm_use_to_list (ssa_use_operand_t *linknode, ssa_use_operand_t *list)
147
{
148
  /* Link the new node at the head of the list.  If we are in the process of
149
     traversing the list, we won't visit any new nodes added to it.  */
150
  linknode->prev = list;
151
  linknode->next = list->next;
152
  list->next->prev = linknode;
153
  list->next = linknode;
154
}
155
 
156
/* Link ssa_imm_use node LINKNODE into the chain for DEF.  */
157
static inline void
158
link_imm_use (ssa_use_operand_t *linknode, tree def)
159
{
160
  ssa_use_operand_t *root;
161
 
162
  if (!def || TREE_CODE (def) != SSA_NAME)
163
    linknode->prev = NULL;
164
  else
165
    {
166
      root = &(SSA_NAME_IMM_USE_NODE (def));
167
      if (linknode->use)
168
        gcc_checking_assert (*(linknode->use) == def);
169
      link_imm_use_to_list (linknode, root);
170
    }
171
}
172
 
173
/* Set the value of the use pointed to by USE to VAL.
   The ordering here is essential: first remove USE from the chain of
   whatever value it currently refers to, then overwrite the use slot,
   then link it into VAL's immediate-use chain (a no-op if VAL is not
   an SSA name).  */
static inline void
set_ssa_use_from_ptr (use_operand_p use, tree val)
{
  delink_imm_use (use);
  *(use->use) = val;
  link_imm_use (use, val);
}
181
 
182
/* Link ssa_imm_use node LINKNODE into the chain for DEF, with use occurring
183
   in STMT.  */
184
static inline void
185
link_imm_use_stmt (ssa_use_operand_t *linknode, tree def, gimple stmt)
186
{
187
  if (stmt)
188
    link_imm_use (linknode, def);
189
  else
190
    link_imm_use (linknode, NULL);
191
  linknode->loc.stmt = stmt;
192
}
193
 
194
/* Relink a new node NODE in place of an old node OLD in the list.  */
static inline void
relink_imm_use (ssa_use_operand_t *node, ssa_use_operand_t *old)
{
  /* The node one had better be in the same list.  */
  gcc_checking_assert (*(old->use) == *(node->use));
  /* NODE inherits OLD's position; if OLD was not actually linked
     (prev == NULL), NODE ends up unlinked too and the neighbors are
     left untouched.  */
  node->prev = old->prev;
  node->next = old->next;
  if (old->prev)
    {
      old->prev->next = node;
      old->next->prev = node;
      /* Remove the old node from the list.  */
      old->prev = NULL;
    }
}
210
 
211
/* Relink ssa_imm_use node LINKNODE into the chain for OLD, with use occurring
212
   in STMT.  */
213
static inline void
214
relink_imm_use_stmt (ssa_use_operand_t *linknode, ssa_use_operand_t *old,
215
                     gimple stmt)
216
{
217
  if (stmt)
218
    relink_imm_use (linknode, old);
219
  else
220
    link_imm_use (linknode, NULL);
221
  linknode->loc.stmt = stmt;
222
}
223
 
224
 
225
/* Return true is IMM has reached the end of the immediate use list.  */
226
static inline bool
227
end_readonly_imm_use_p (const imm_use_iterator *imm)
228
{
229
  return (imm->imm_use == imm->end_p);
230
}
231
 
232
/* Initialize iterator IMM to process the immediate-use list for VAR.
   Return the first use, or NULL_USE_OPERAND_P if the list is empty.
   "Readonly" iteration assumes the list is not modified while
   iterating.  */
static inline use_operand_p
first_readonly_imm_use (imm_use_iterator *imm, tree var)
{
  imm->end_p = &(SSA_NAME_IMM_USE_NODE (var));
  imm->imm_use = imm->end_p->next;
#ifdef ENABLE_CHECKING
  /* Remember the successor so next_readonly_imm_use can verify the
     list was not modified behind the iterator's back.  */
  imm->iter_node.next = imm->imm_use->next;
#endif
  if (end_readonly_imm_use_p (imm))
    return NULL_USE_OPERAND_P;
  return imm->imm_use;
}
245
 
246
/* Bump IMM to the next use in the list; return it, or
   NULL_USE_OPERAND_P at the end of the list.  */
static inline use_operand_p
next_readonly_imm_use (imm_use_iterator *imm)
{
  use_operand_p old = imm->imm_use;

#ifdef ENABLE_CHECKING
  /* If this assertion fails, it indicates the 'next' pointer has changed
     since the last bump.  This indicates that the list is being modified
     via stmt changes, or SET_USE, or somesuch thing, and you need to be
     using the SAFE version of the iterator.  */
  gcc_assert (imm->iter_node.next == old->next);
  imm->iter_node.next = old->next->next;
#endif

  imm->imm_use = old->next;
  if (end_readonly_imm_use_p (imm))
    return NULL_USE_OPERAND_P;
  return imm->imm_use;
}
266
 
267
/* tree-cfg.c */
268
extern bool has_zero_uses_1 (const ssa_use_operand_t *head);
269
extern bool single_imm_use_1 (const ssa_use_operand_t *head,
270
                              use_operand_p *use_p, gimple *stmt);
271
 
272
/* Return true if VAR has no nondebug uses.  */
273
static inline bool
274
has_zero_uses (const_tree var)
275
{
276
  const ssa_use_operand_t *const ptr = &(SSA_NAME_IMM_USE_NODE (var));
277
 
278
  /* A single use_operand means there is no items in the list.  */
279
  if (ptr == ptr->next)
280
    return true;
281
 
282
  /* If there are debug stmts, we have to look at each use and see
283
     whether there are any nondebug uses.  */
284
  if (!MAY_HAVE_DEBUG_STMTS)
285
    return false;
286
 
287
  return has_zero_uses_1 (ptr);
288
}
289
 
290
/* Return true if VAR has a single nondebug use.  */
291
static inline bool
292
has_single_use (const_tree var)
293
{
294
  const ssa_use_operand_t *const ptr = &(SSA_NAME_IMM_USE_NODE (var));
295
 
296
  /* If there aren't any uses whatsoever, we're done.  */
297
  if (ptr == ptr->next)
298
    return false;
299
 
300
  /* If there's a single use, check that it's not a debug stmt.  */
301
  if (ptr == ptr->next->next)
302
    return !is_gimple_debug (USE_STMT (ptr->next));
303
 
304
  /* If there are debug stmts, we have to look at each of them.  */
305
  if (!MAY_HAVE_DEBUG_STMTS)
306
    return false;
307
 
308
  return single_imm_use_1 (ptr, NULL, NULL);
309
}
310
 
311
 
312
/* If VAR has only a single immediate nondebug use, return true, and
   set USE_P and STMT to the use pointer and stmt of occurrence.
   On failure both out-parameters are cleared.  */
static inline bool
single_imm_use (const_tree var, use_operand_p *use_p, gimple *stmt)
{
  const ssa_use_operand_t *const ptr = &(SSA_NAME_IMM_USE_NODE (var));

  /* If there aren't any uses whatsoever, we're done.  */
  if (ptr == ptr->next)
    {
      /* Shared failure exit: the label sits inside this block but is
         reachable via goto from the cases below.  */
    return_false:
      *use_p = NULL_USE_OPERAND_P;
      *stmt = NULL;
      return false;
    }

  /* If there's a single use, check that it's not a debug stmt.  */
  if (ptr == ptr->next->next)
    {
      if (!is_gimple_debug (USE_STMT (ptr->next)))
        {
          *use_p = ptr->next;
          *stmt = ptr->next->loc.stmt;
          return true;
        }
      else
        goto return_false;
    }

  /* If there are debug stmts, we have to look at each of them.  */
  if (!MAY_HAVE_DEBUG_STMTS)
    goto return_false;

  /* Scan past debug stmts; single_imm_use_1 fills in the outputs.  */
  return single_imm_use_1 (ptr, use_p, stmt);
}
347
 
348
/* Return the number of nondebug immediate uses of VAR.  */
349
static inline unsigned int
350
num_imm_uses (const_tree var)
351
{
352
  const ssa_use_operand_t *const start = &(SSA_NAME_IMM_USE_NODE (var));
353
  const ssa_use_operand_t *ptr;
354
  unsigned int num = 0;
355
 
356
  if (!MAY_HAVE_DEBUG_STMTS)
357
    for (ptr = start->next; ptr != start; ptr = ptr->next)
358
      num++;
359
  else
360
    for (ptr = start->next; ptr != start; ptr = ptr->next)
361
      if (!is_gimple_debug (USE_STMT (ptr)))
362
        num++;
363
 
364
  return num;
365
}
366
 
367
/* Return the tree pointed-to by USE.  */
368
static inline tree
369
get_use_from_ptr (use_operand_p use)
370
{
371
  return *(use->use);
372
}
373
 
374
/* Return the tree pointed-to by DEF.  */
375
static inline tree
376
get_def_from_ptr (def_operand_p def)
377
{
378
  return *def;
379
}
380
 
381
/* Return a use_operand_p pointer for argument I of PHI node GS.  */
382
 
383
static inline use_operand_p
384
gimple_phi_arg_imm_use_ptr (gimple gs, int i)
385
{
386
  return &gimple_phi_arg (gs, i)->imm_use;
387
}
388
 
389
/* Return the tree operand for argument I of PHI node GS.  */
390
 
391
static inline tree
392
gimple_phi_arg_def (gimple gs, size_t index)
393
{
394
  struct phi_arg_d *pd = gimple_phi_arg (gs, index);
395
  return get_use_from_ptr (&pd->imm_use);
396
}
397
 
398
/* Return a pointer to the tree operand for argument I of PHI node GS.  */
399
 
400
static inline tree *
401
gimple_phi_arg_def_ptr (gimple gs, size_t index)
402
{
403
  return &gimple_phi_arg (gs, index)->def;
404
}
405
 
406
/* Return the edge associated with argument I of phi node GS.  */
407
 
408
static inline edge
409
gimple_phi_arg_edge (gimple gs, size_t i)
410
{
411
  return EDGE_PRED (gimple_bb (gs), i);
412
}
413
 
414
/* Return the source location of gimple argument I of phi node GS.  */
415
 
416
static inline source_location
417
gimple_phi_arg_location (gimple gs, size_t i)
418
{
419
  return gimple_phi_arg (gs, i)->locus;
420
}
421
 
422
/* Return the source location of the argument on edge E of phi node GS.  */
423
 
424
static inline source_location
425
gimple_phi_arg_location_from_edge (gimple gs, edge e)
426
{
427
  return gimple_phi_arg (gs, e->dest_idx)->locus;
428
}
429
 
430
/* Set the source location of gimple argument I of phi node GS to LOC.  */
431
 
432
static inline void
433
gimple_phi_arg_set_location (gimple gs, size_t i, source_location loc)
434
{
435
  gimple_phi_arg (gs, i)->locus = loc;
436
}
437
 
438
/* Return TRUE if argument I of phi node GS has a location record.  */
439
 
440
static inline bool
441
gimple_phi_arg_has_location (gimple gs, size_t i)
442
{
443
  return gimple_phi_arg_location (gs, i) != UNKNOWN_LOCATION;
444
}
445
 
446
 
447
/* Return the PHI nodes for basic block BB, or NULL if there are no
448
   PHI nodes.  */
449
static inline gimple_seq
450
phi_nodes (const_basic_block bb)
451
{
452
  gcc_checking_assert (!(bb->flags & BB_RTL));
453
  return bb->il.gimple.phi_nodes;
454
}
455
 
456
static inline gimple_seq *
457
phi_nodes_ptr (basic_block bb)
458
{
459
  gcc_checking_assert (!(bb->flags & BB_RTL));
460
  return &bb->il.gimple.phi_nodes;
461
}
462
 
463
/* Set PHI nodes of a basic block BB to SEQ.  */
464
 
465
static inline void
466
set_phi_nodes (basic_block bb, gimple_seq seq)
467
{
468
  gimple_stmt_iterator i;
469
 
470
  gcc_checking_assert (!(bb->flags & BB_RTL));
471
  bb->il.gimple.phi_nodes = seq;
472
  if (seq)
473
    for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
474
      gimple_set_bb (gsi_stmt (i), bb);
475
}
476
 
477
/* Return the index of the phi argument which contains the specified
   use USE.  */

static inline int
phi_arg_index_from_use (use_operand_p use)
{
  struct phi_arg_d *element, *root;
  size_t index;
  gimple phi;

  /* Since the use is the first thing in a PHI argument element, we can
     calculate its index based on casting it to an argument, and performing
     pointer arithmetic.  */

  phi = USE_STMT (use);

  /* Relies on imm_use being the first member of struct phi_arg_d, so
     the cast lands on the containing element.  */
  element = (struct phi_arg_d *)use;
  root = gimple_phi_arg (phi, 0);
  index = element - root;

  /* Make sure the calculation doesn't have any leftover bytes.  If it does,
     then imm_use is likely not the first element in phi_arg_d.  */
  gcc_checking_assert ((((char *)element - (char *)root)
                        % sizeof (struct phi_arg_d)) == 0
                       && index < gimple_phi_capacity (phi));

 return index;
}
504
 
505
/* Return true if T (assumed to be a DECL) is a global variable.
506
   A variable is considered global if its storage is not automatic.  */
507
 
508
static inline bool
509
is_global_var (const_tree t)
510
{
511
  return (TREE_STATIC (t) || DECL_EXTERNAL (t));
512
}
513
 
514
 
515
/* Return true if VAR may be aliased.  A variable is considered as
   maybe aliased if it has its address taken by the local TU
   or possibly by another TU and might be modified through a pointer.  */

static inline bool
may_be_aliased (const_tree var)
{
  /* Three conjuncts:
     1. CONST_DECLs are never aliased;
     2. a readonly variable with static/public/external storage whose
        type needs no runtime construction cannot be modified through a
        pointer, so it does not count;
     3. otherwise, the address must be escapable: public, external, or
        explicitly address-taken.  */
  return (TREE_CODE (var) != CONST_DECL
          && !((TREE_STATIC (var) || TREE_PUBLIC (var) || DECL_EXTERNAL (var))
               && TREE_READONLY (var)
               && !TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (var)))
          && (TREE_PUBLIC (var)
              || DECL_EXTERNAL (var)
              || TREE_ADDRESSABLE (var)));
}
530
 
531
 
532
/* PHI nodes should contain only ssa_names and invariants.  A test
533
   for ssa_name is definitely simpler; don't let invalid contents
534
   slip in in the meantime.  */
535
 
536
static inline bool
537
phi_ssa_name_p (const_tree t)
538
{
539
  if (TREE_CODE (t) == SSA_NAME)
540
    return true;
541
  gcc_checking_assert (is_gimple_min_invariant (t));
542
  return false;
543
}
544
 
545
 
546
/* Returns the loop of the statement STMT.  */
547
 
548
static inline struct loop *
549
loop_containing_stmt (gimple stmt)
550
{
551
  basic_block bb = gimple_bb (stmt);
552
  if (!bb)
553
    return NULL;
554
 
555
  return bb->loop_father;
556
}
557
 
558
 
559
/*  -----------------------------------------------------------------------  */
560
 
561
/* The following set of routines are used to iterator over various type of
562
   SSA operands.  */
563
 
564
/* Return true if PTR is finished iterating.  */
565
static inline bool
566
op_iter_done (const ssa_op_iter *ptr)
567
{
568
  return ptr->done;
569
}
570
 
571
/* Get the next iterator use value for PTR.  Returns
   NULL_USE_OPERAND_P and marks the iterator done when exhausted.
   Serves both plain statements (via the use-operand list) and PHI
   nodes (via i/numops argument indexing set up by
   op_iter_init_phiuse).  */
static inline use_operand_p
op_iter_next_use (ssa_op_iter *ptr)
{
  use_operand_p use_p;
  gcc_checking_assert (ptr->iter_type == ssa_op_iter_use);
  /* First drain the statement's use-operand list, if any.  */
  if (ptr->uses)
    {
      use_p = USE_OP_PTR (ptr->uses);
      ptr->uses = ptr->uses->next;
      return use_p;
    }
  /* Then walk PHI arguments by index (numops is 0 for non-PHIs).  */
  if (ptr->i < ptr->numops)
    {
      return PHI_ARG_DEF_PTR (ptr->stmt, (ptr->i)++);
    }
  ptr->done = true;
  return NULL_USE_OPERAND_P;
}
590
 
591
/* Get the next iterator def value for PTR.  Returns
   NULL_DEF_OPERAND_P and marks the iterator done when exhausted.
   Flags bits are cleared as each category is exhausted, so the
   iterator state machine never revisits a category.  */
static inline def_operand_p
op_iter_next_def (ssa_op_iter *ptr)
{
  gcc_checking_assert (ptr->iter_type == ssa_op_iter_def);
  /* The virtual definition, if present and requested, comes first.  */
  if (ptr->flags & SSA_OP_VDEF)
    {
      tree *p;
      ptr->flags &= ~SSA_OP_VDEF;
      p = gimple_vdef_ptr (ptr->stmt);
      if (p && *p)
        return p;
    }
  /* Then real definitions among the statement operands.  */
  if (ptr->flags & SSA_OP_DEF)
    {
      while (ptr->i < ptr->numops)
        {
          tree *val = gimple_op_ptr (ptr->stmt, ptr->i);
          ptr->i++;
          if (*val)
            {
              /* ASM outputs are wrapped in a TREE_LIST.  */
              if (TREE_CODE (*val) == TREE_LIST)
                val = &TREE_VALUE (*val);
              if (TREE_CODE (*val) == SSA_NAME
                  || is_gimple_reg (*val))
                return val;
            }
        }
      ptr->flags &= ~SSA_OP_DEF;
    }

  ptr->done = true;
  return NULL_DEF_OPERAND_P;
}
625
 
626
/* Get the next iterator tree value for PTR.  Returns NULL_TREE and
   marks the iterator done when exhausted.  Visits uses first, then the
   virtual def, then real defs — mirroring op_iter_next_use and
   op_iter_next_def but yielding trees instead of pointers.  */
static inline tree
op_iter_next_tree (ssa_op_iter *ptr)
{
  tree val;
  gcc_checking_assert (ptr->iter_type == ssa_op_iter_tree);
  /* Drain the use-operand list first.  */
  if (ptr->uses)
    {
      val = USE_OP (ptr->uses);
      ptr->uses = ptr->uses->next;
      return val;
    }
  /* Then the virtual definition, if requested and present.  */
  if (ptr->flags & SSA_OP_VDEF)
    {
      ptr->flags &= ~SSA_OP_VDEF;
      if ((val = gimple_vdef (ptr->stmt)))
        return val;
    }
  /* Finally real definitions among the statement operands.  */
  if (ptr->flags & SSA_OP_DEF)
    {
      while (ptr->i < ptr->numops)
        {
          val = gimple_op (ptr->stmt, ptr->i);
          ptr->i++;
          if (val)
            {
              /* ASM outputs are wrapped in a TREE_LIST.  */
              if (TREE_CODE (val) == TREE_LIST)
                val = TREE_VALUE (val);
              if (TREE_CODE (val) == SSA_NAME
                  || is_gimple_reg (val))
                return val;
            }
        }
      ptr->flags &= ~SSA_OP_DEF;
    }

  ptr->done = true;
  return NULL_TREE;
}
665
 
666
 
667
/* This functions clears the iterator PTR, and marks it done.  This is normally
668
   used to prevent warnings in the compile about might be uninitialized
669
   components.  */
670
 
671
static inline void
672
clear_and_done_ssa_iter (ssa_op_iter *ptr)
673
{
674
  ptr->i = 0;
675
  ptr->numops = 0;
676
  ptr->uses = NULL;
677
  ptr->iter_type = ssa_op_iter_none;
678
  ptr->stmt = NULL;
679
  ptr->done = true;
680
  ptr->flags = 0;
681
}
682
 
683
/* Initialize the iterator PTR over the operands of STMT selected by
   FLAGS.  (Original comment said "virtual defs", but this routine
   handles all the SSA_OP_* operand categories.)  */
static inline void
op_iter_init (ssa_op_iter *ptr, gimple stmt, int flags)
{
  /* PHI nodes require a different iterator initialization path.  We
     do not support iterating over virtual defs or uses without
     iterating over defs or uses at the same time.  */
  gcc_checking_assert (gimple_code (stmt) != GIMPLE_PHI
                       && (!(flags & SSA_OP_VDEF) || (flags & SSA_OP_DEF))
                       && (!(flags & SSA_OP_VUSE) || (flags & SSA_OP_USE)));
  ptr->numops = 0;
  if (flags & (SSA_OP_DEF | SSA_OP_VDEF))
    {
      switch (gimple_code (stmt))
        {
          /* Assignments and calls define at most operand 0.  */
          case GIMPLE_ASSIGN:
          case GIMPLE_CALL:
            ptr->numops = 1;
            break;
          /* ASMs may define each of their outputs.  */
          case GIMPLE_ASM:
            ptr->numops = gimple_asm_noutputs (stmt);
            break;
          /* Other statements define nothing; drop the def bits.  */
          default:
            ptr->numops = 0;
            flags &= ~(SSA_OP_DEF | SSA_OP_VDEF);
            break;
        }
    }
  ptr->uses = (flags & (SSA_OP_USE|SSA_OP_VUSE)) ? gimple_use_ops (stmt) : NULL;
  /* The virtual use, when present, is the first entry of the use list;
     skip it if the caller did not ask for virtual uses.  */
  if (!(flags & SSA_OP_VUSE)
      && ptr->uses
      && gimple_vuse (stmt) != NULL_TREE)
    ptr->uses = ptr->uses->next;
  ptr->done = false;
  ptr->i = 0;

  ptr->stmt = stmt;
  ptr->flags = flags;
}
722
 
723
/* Initialize iterator PTR to the use operands in STMT based on FLAGS. Return
724
   the first use.  */
725
static inline use_operand_p
726
op_iter_init_use (ssa_op_iter *ptr, gimple stmt, int flags)
727
{
728
  gcc_checking_assert ((flags & SSA_OP_ALL_DEFS) == 0
729
                       && (flags & SSA_OP_USE));
730
  op_iter_init (ptr, stmt, flags);
731
  ptr->iter_type = ssa_op_iter_use;
732
  return op_iter_next_use (ptr);
733
}
734
 
735
/* Initialize iterator PTR to the def operands in STMT based on FLAGS. Return
736
   the first def.  */
737
static inline def_operand_p
738
op_iter_init_def (ssa_op_iter *ptr, gimple stmt, int flags)
739
{
740
  gcc_checking_assert ((flags & SSA_OP_ALL_USES) == 0
741
                       && (flags & SSA_OP_DEF));
742
  op_iter_init (ptr, stmt, flags);
743
  ptr->iter_type = ssa_op_iter_def;
744
  return op_iter_next_def (ptr);
745
}
746
 
747
/* Initialize iterator PTR to the operands in STMT based on FLAGS. Return
748
   the first operand as a tree.  */
749
static inline tree
750
op_iter_init_tree (ssa_op_iter *ptr, gimple stmt, int flags)
751
{
752
  op_iter_init (ptr, stmt, flags);
753
  ptr->iter_type = ssa_op_iter_tree;
754
  return op_iter_next_tree (ptr);
755
}
756
 
757
 
758
/* If there is a single operand in STMT matching FLAGS, return it.  Otherwise
759
   return NULL.  */
760
static inline tree
761
single_ssa_tree_operand (gimple stmt, int flags)
762
{
763
  tree var;
764
  ssa_op_iter iter;
765
 
766
  var = op_iter_init_tree (&iter, stmt, flags);
767
  if (op_iter_done (&iter))
768
    return NULL_TREE;
769
  op_iter_next_tree (&iter);
770
  if (op_iter_done (&iter))
771
    return var;
772
  return NULL_TREE;
773
}
774
 
775
 
776
/* If there is a single operand in STMT matching FLAGS, return it.  Otherwise
777
   return NULL.  */
778
static inline use_operand_p
779
single_ssa_use_operand (gimple stmt, int flags)
780
{
781
  use_operand_p var;
782
  ssa_op_iter iter;
783
 
784
  var = op_iter_init_use (&iter, stmt, flags);
785
  if (op_iter_done (&iter))
786
    return NULL_USE_OPERAND_P;
787
  op_iter_next_use (&iter);
788
  if (op_iter_done (&iter))
789
    return var;
790
  return NULL_USE_OPERAND_P;
791
}
792
 
793
 
794
 
795
/* If there is a single operand in STMT matching FLAGS, return it.  Otherwise
796
   return NULL.  */
797
static inline def_operand_p
798
single_ssa_def_operand (gimple stmt, int flags)
799
{
800
  def_operand_p var;
801
  ssa_op_iter iter;
802
 
803
  var = op_iter_init_def (&iter, stmt, flags);
804
  if (op_iter_done (&iter))
805
    return NULL_DEF_OPERAND_P;
806
  op_iter_next_def (&iter);
807
  if (op_iter_done (&iter))
808
    return var;
809
  return NULL_DEF_OPERAND_P;
810
}
811
 
812
 
813
/* Return true if there are zero operands in STMT matching the type
814
   given in FLAGS.  */
815
static inline bool
816
zero_ssa_operands (gimple stmt, int flags)
817
{
818
  ssa_op_iter iter;
819
 
820
  op_iter_init_tree (&iter, stmt, flags);
821
  return op_iter_done (&iter);
822
}
823
 
824
 
825
/* Return the number of operands matching FLAGS in STMT.  */
826
static inline int
827
num_ssa_operands (gimple stmt, int flags)
828
{
829
  ssa_op_iter iter;
830
  tree t;
831
  int num = 0;
832
 
833
  gcc_checking_assert (gimple_code (stmt) != GIMPLE_PHI);
834
  FOR_EACH_SSA_TREE_OPERAND (t, stmt, iter, flags)
835
    num++;
836
  return num;
837
}
838
 
839
static inline use_operand_p
840
op_iter_init_phiuse (ssa_op_iter *ptr, gimple phi, int flags);
841
 
842
/* Delink all immediate_use information for STMT.  */
843
static inline void
844
delink_stmt_imm_use (gimple stmt)
845
{
846
   ssa_op_iter iter;
847
   use_operand_p use_p;
848
 
849
   if (ssa_operands_active (cfun))
850
     FOR_EACH_PHI_OR_STMT_USE (use_p, stmt, iter, SSA_OP_ALL_USES)
851
       delink_imm_use (use_p);
852
}
853
 
854
 
855
/* If there is a single DEF in the PHI node which matches FLAG, return it.
856
   Otherwise return NULL_DEF_OPERAND_P.  */
857
static inline tree
858
single_phi_def (gimple stmt, int flags)
859
{
860
  tree def = PHI_RESULT (stmt);
861
  if ((flags & SSA_OP_DEF) && is_gimple_reg (def))
862
    return def;
863
  if ((flags & SSA_OP_VIRTUAL_DEFS) && !is_gimple_reg (def))
864
    return def;
865
  return NULL_TREE;
866
}
867
 
868
/* Initialize the iterator PTR for uses matching FLAGS in PHI.  FLAGS
   should be either SSA_OP_USES or SSA_OP_VIRTUAL_USES.  Return the
   first use, or NULL_USE_OPERAND_P if the PHI has no uses of the
   requested kind.  */
static inline use_operand_p
op_iter_init_phiuse (ssa_op_iter *ptr, gimple phi, int flags)
{
  tree phi_def = gimple_phi_result (phi);
  int comp;

  clear_and_done_ssa_iter (ptr);
  ptr->done = false;

  gcc_checking_assert ((flags & (SSA_OP_USE | SSA_OP_VIRTUAL_USES)) != 0);

  /* A PHI's arguments are all real or all virtual, matching its
     result.  */
  comp = (is_gimple_reg (phi_def) ? SSA_OP_USE : SSA_OP_VIRTUAL_USES);

  /* If the PHI node doesn't have the operand type we care about,
     we're done.  */
  if ((flags & comp) == 0)
    {
      ptr->done = true;
      return NULL_USE_OPERAND_P;
    }

  /* op_iter_next_use walks PHI arguments via i/numops.  */
  ptr->stmt = phi;
  ptr->numops = gimple_phi_num_args (phi);
  ptr->iter_type = ssa_op_iter_use;
  ptr->flags = flags;
  return op_iter_next_use (ptr);
}
896
 
897
 
898
/* Start an iterator PTR for the definition of PHI node PHI, selected
   by FLAGS (SSA_OP_DEF or SSA_OP_VIRTUAL_DEFS).  Return the def
   pointer, or NULL_DEF_OPERAND_P when the PHI's result does not match
   FLAGS.  */

static inline def_operand_p
op_iter_init_phidef (ssa_op_iter *ptr, gimple phi, int flags)
{
  tree phi_def = PHI_RESULT (phi);
  int comp;

  clear_and_done_ssa_iter (ptr);
  ptr->done = false;

  gcc_checking_assert ((flags & (SSA_OP_DEF | SSA_OP_VIRTUAL_DEFS)) != 0);

  /* A register result is a real def; otherwise it is virtual.  */
  comp = (is_gimple_reg (phi_def) ? SSA_OP_DEF : SSA_OP_VIRTUAL_DEFS);

  /* If the PHI node doesn't have the operand type we care about,
     we're done.  */
  if ((flags & comp) == 0)
    {
      ptr->done = true;
      return NULL_DEF_OPERAND_P;
    }

  ptr->iter_type = ssa_op_iter_def;
  /* The first call to op_iter_next_def will terminate the iterator since
     all the fields are NULL.  Simply return the result here as the first and
     therefore only result.  */
  return PHI_RESULT_PTR (phi);
}
927
 
928
/* Return true is IMM has reached the end of the immediate use stmt list.  */
929
 
930
static inline bool
931
end_imm_use_stmt_p (const imm_use_iterator *imm)
932
{
933
  return (imm->imm_use == imm->end_p);
934
}
935
 
936
/* Finished the traverse of an immediate use stmt list IMM by removing the
937
   placeholder node from the list.  */
938
 
939
static inline void
940
end_imm_use_stmt_traverse (imm_use_iterator *imm)
941
{
942
  delink_imm_use (&(imm->iter_node));
943
}
944
 
945
/* Immediate use traversal of uses within a stmt require that all the
   uses on a stmt be sequentially listed.  This routine is used to build up
   this sequential list by adding USE_P to the end of the current list
   currently delimited by HEAD and LAST_P.  The new LAST_P value is
   returned.  */

static inline use_operand_p
move_use_after_head (use_operand_p use_p, use_operand_p head,
                      use_operand_p last_p)
{
  /* All nodes being grouped must be uses of the same value.  */
  gcc_checking_assert (USE_FROM_PTR (use_p) == USE_FROM_PTR (head));
  /* Skip head when we find it.  */
  if (use_p != head)
    {
      /* If use_p is already linked in after last_p, continue.  */
      if (last_p->next == use_p)
        last_p = use_p;
      else
        {
          /* Delink from current location, and link in at last_p.  */
          delink_imm_use (use_p);
          link_imm_use_to_list (use_p, last_p);
          last_p = use_p;
        }
    }
  return last_p;
}
972
 
973
 
974
/* Relink all uses with the same stmt as HEAD into the list immediately
   following HEAD, then place iterator IMM's marker node after them so
   next_imm_use_stmt knows where this statement's uses end.  */

static inline void
link_use_stmts_after (use_operand_p head, imm_use_iterator *imm)
{
  use_operand_p use_p;
  use_operand_p last_p = head;
  gimple head_stmt = USE_STMT (head);
  tree use = USE_FROM_PTR (head);
  ssa_op_iter op_iter;
  int flag;

  /* Only look at virtual or real uses, depending on the type of HEAD.  */
  flag = (is_gimple_reg (use) ? SSA_OP_USE : SSA_OP_VIRTUAL_USES);

  if (gimple_code (head_stmt) == GIMPLE_PHI)
    {
      /* Gather every PHI argument that uses the same value.  */
      FOR_EACH_PHI_ARG (use_p, head_stmt, op_iter, flag)
        if (USE_FROM_PTR (use_p) == use)
          last_p = move_use_after_head (use_p, head, last_p);
    }
  else
    {
      if (flag == SSA_OP_USE)
        {
          /* Gather every real operand of the stmt that uses the value.  */
          FOR_EACH_SSA_USE_OPERAND (use_p, head_stmt, op_iter, flag)
            if (USE_FROM_PTR (use_p) == use)
              last_p = move_use_after_head (use_p, head, last_p);
        }
      else if ((use_p = gimple_vuse_op (head_stmt)) != NULL_USE_OPERAND_P)
        {
          /* A stmt has at most one virtual use.  */
          if (USE_FROM_PTR (use_p) == use)
            last_p = move_use_after_head (use_p, head, last_p);
        }
    }
  /* Link iter node in after last_p.  */
  if (imm->iter_node.prev != NULL)
    delink_imm_use (&imm->iter_node);
  link_imm_use_to_list (&(imm->iter_node), last_p);
}
1015
 
1016
/* Initialize IMM to traverse over uses of VAR.  Return the first
   statement containing a use, or NULL if there are none.  */
static inline gimple
first_imm_use_stmt (imm_use_iterator *imm, tree var)
{
  imm->end_p = &(SSA_NAME_IMM_USE_NODE (var));
  imm->imm_use = imm->end_p->next;
  imm->next_imm_name = NULL_USE_OPERAND_P;

  /* iter_node is used as a marker within the immediate use list to indicate
     where the end of the current stmt's uses are.  Initialize it to NULL
     stmt and use, which indicates a marker node.  */
  imm->iter_node.prev = NULL_USE_OPERAND_P;
  imm->iter_node.next = NULL_USE_OPERAND_P;
  imm->iter_node.loc.stmt = NULL;
  imm->iter_node.use = NULL;

  /* Empty use list: nothing to visit.  */
  if (end_imm_use_stmt_p (imm))
    return NULL;

  /* Group the first stmt's uses together and drop the marker after
     them.  */
  link_use_stmts_after (imm->imm_use, imm);

  return USE_STMT (imm->imm_use);
}
1039
 
1040
/* Bump IMM to the next stmt which has a use of var.  Returns NULL when the
   traversal is complete (and unhooks the marker node from the list).  */

static inline gimple
next_imm_use_stmt (imm_use_iterator *imm)
{
  /* The marker node sits just past the previous stmt's uses; the next use
     after it belongs to a different stmt (or is the list root).  */
  imm->imm_use = imm->iter_node.next;
  if (end_imm_use_stmt_p (imm))
    {
      /* Done: remove the marker if it is still linked in.  */
      if (imm->iter_node.prev != NULL)
        delink_imm_use (&imm->iter_node);
      return NULL;
    }

  /* Regroup uses for the new stmt and re-insert the marker after them.  */
  link_use_stmts_after (imm->imm_use, imm);
  return USE_STMT (imm->imm_use);
}
1056
 
1057
/* This routine will return the first use on the stmt IMM currently refers
1058
   to.  */
1059
 
1060
static inline use_operand_p
1061
first_imm_use_on_stmt (imm_use_iterator *imm)
1062
{
1063
  imm->next_imm_name = imm->imm_use->next;
1064
  return imm->imm_use;
1065
}
1066
 
1067
/*  Return TRUE if the last use on the stmt IMM refers to has been visited.  */
1068
 
1069
static inline bool
1070
end_imm_use_on_stmt_p (const imm_use_iterator *imm)
1071
{
1072
  return (imm->imm_use == &(imm->iter_node));
1073
}
1074
 
1075
/* Bump to the next use on the stmt IMM refers to, return NULL if done.  */
1076
 
1077
static inline use_operand_p
1078
next_imm_use_on_stmt (imm_use_iterator *imm)
1079
{
1080
  imm->imm_use = imm->next_imm_name;
1081
  if (end_imm_use_on_stmt_p (imm))
1082
    return NULL_USE_OPERAND_P;
1083
  else
1084
    {
1085
      imm->next_imm_name = imm->imm_use->next;
1086
      return imm->imm_use;
1087
    }
1088
}
1089
 
1090
/* Return true if VAR cannot be modified by the program.  */
1091
 
1092
static inline bool
1093
unmodifiable_var_p (const_tree var)
1094
{
1095
  if (TREE_CODE (var) == SSA_NAME)
1096
    var = SSA_NAME_VAR (var);
1097
 
1098
  return TREE_READONLY (var) && (TREE_STATIC (var) || DECL_EXTERNAL (var));
1099
}
1100
 
1101
/* Return true if REF, a handled component reference, has an ARRAY_REF
1102
   somewhere in it.  */
1103
 
1104
static inline bool
1105
ref_contains_array_ref (const_tree ref)
1106
{
1107
  gcc_checking_assert (handled_component_p (ref));
1108
 
1109
  do {
1110
    if (TREE_CODE (ref) == ARRAY_REF)
1111
      return true;
1112
    ref = TREE_OPERAND (ref, 0);
1113
  } while (handled_component_p (ref));
1114
 
1115
  return false;
1116
}
1117
 
1118
/* Return true if REF has an VIEW_CONVERT_EXPR somewhere in it.  */
1119
 
1120
static inline bool
1121
contains_view_convert_expr_p (const_tree ref)
1122
{
1123
  while (handled_component_p (ref))
1124
    {
1125
      if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
1126
        return true;
1127
      ref = TREE_OPERAND (ref, 0);
1128
    }
1129
 
1130
  return false;
1131
}
1132
 
1133
/* Return true, if the two ranges [POS1, SIZE1] and [POS2, SIZE2]
1134
   overlap.  SIZE1 and/or SIZE2 can be (unsigned)-1 in which case the
1135
   range is open-ended.  Otherwise return false.  */
1136
 
1137
static inline bool
1138
ranges_overlap_p (unsigned HOST_WIDE_INT pos1,
1139
                  unsigned HOST_WIDE_INT size1,
1140
                  unsigned HOST_WIDE_INT pos2,
1141
                  unsigned HOST_WIDE_INT size2)
1142
{
1143
  if (pos1 >= pos2
1144
      && (size2 == (unsigned HOST_WIDE_INT)-1
1145
          || pos1 < (pos2 + size2)))
1146
    return true;
1147
  if (pos2 >= pos1
1148
      && (size1 == (unsigned HOST_WIDE_INT)-1
1149
          || pos2 < (pos1 + size1)))
1150
    return true;
1151
 
1152
  return false;
1153
}
1154
 
1155
/* Accessor to tree-ssa-operands.c caches.  */
1156
static inline struct ssa_operands *
1157
gimple_ssa_operands (const struct function *fun)
1158
{
1159
  return &fun->gimple_df->ssa_operands;
1160
}
1161
 
1162
/* Given an edge_var_map V, return the PHI arg definition.  */
1163
 
1164
static inline tree
1165
redirect_edge_var_map_def (edge_var_map *v)
1166
{
1167
  return v->def;
1168
}
1169
 
1170
/* Given an edge_var_map V, return the PHI result.  */
1171
 
1172
static inline tree
1173
redirect_edge_var_map_result (edge_var_map *v)
1174
{
1175
  return v->result;
1176
}
1177
 
1178
/* Given an edge_var_map V, return the PHI arg location.  */
1179
 
1180
static inline source_location
1181
redirect_edge_var_map_location (edge_var_map *v)
1182
{
1183
  return v->locus;
1184
}
1185
 
1186
 
1187
/* Return an SSA_NAME node for variable VAR defined in statement STMT
1188
   in function cfun.  */
1189
 
1190
static inline tree
1191
make_ssa_name (tree var, gimple stmt)
1192
{
1193
  return make_ssa_name_fn (cfun, var, stmt);
1194
}
1195
 
1196
/* Return an SSA_NAME node using the template SSA name NAME defined in
1197
   statement STMT in function cfun.  */
1198
 
1199
static inline tree
1200
copy_ssa_name (tree var, gimple stmt)
1201
{
1202
  return copy_ssa_name_fn (cfun, var, stmt);
1203
}
1204
 
1205
/*  Creates a duplicate of a SSA name NAME tobe defined by statement STMT
1206
    in function cfun.  */
1207
 
1208
static inline tree
1209
duplicate_ssa_name (tree var, gimple stmt)
1210
{
1211
  return duplicate_ssa_name_fn (cfun, var, stmt);
1212
}
1213
 
1214
/* Return an anonymous SSA_NAME node for type TYPE defined in statement STMT
1215
   in function cfun.  Arrange so that it uses NAME in dumps.  */
1216
 
1217
static inline tree
1218
make_temp_ssa_name (tree type, gimple stmt, const char *name)
1219
{
1220
  tree ssa_name;
1221
  gcc_checking_assert (TYPE_P (type));
1222
  ssa_name = make_ssa_name_fn (cfun, type, stmt);
1223
  SET_SSA_NAME_VAR_OR_IDENTIFIER (ssa_name, get_identifier (name));
1224
  return ssa_name;
1225
}
1226
 
1227
/* Returns the base object and a constant BITS_PER_UNIT offset in *POFFSET that
   denotes the starting address of the memory access EXP.
   Returns NULL_TREE if the offset is not constant or any component
   is not BITS_PER_UNIT-aligned.
   VALUEIZE if non-NULL is used to valueize SSA names.  It should return
   its argument or a constant if the argument is known to be constant.  */
/* ??? This is a static inline here to avoid the overhead of the indirect calls
   to VALUEIZE.  But is this overhead really that significant?  And should we
   perhaps just rely on WHOPR to specialize the function?  */

static inline tree
get_addr_base_and_unit_offset_1 (tree exp, HOST_WIDE_INT *poffset,
                                 tree (*valueize) (tree))
{
  HOST_WIDE_INT byte_offset = 0;

  /* Compute cumulative byte-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      switch (TREE_CODE (exp))
        {
        case BIT_FIELD_REF:
          /* Bit-field accesses need not be byte-aligned; give up.  */
          return NULL_TREE;

        case COMPONENT_REF:
          {
            tree field = TREE_OPERAND (exp, 1);
            tree this_offset = component_ref_field_offset (exp);
            HOST_WIDE_INT hthis_offset;

            /* Fail on variable offsets and on fields that do not start
               on a byte boundary.  */
            if (!this_offset
                || TREE_CODE (this_offset) != INTEGER_CST
                || (TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field))
                    % BITS_PER_UNIT))
              return NULL_TREE;

            /* Byte part of the offset plus the bit part converted
               to bytes.  */
            hthis_offset = TREE_INT_CST_LOW (this_offset);
            hthis_offset += (TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field))
                             / BITS_PER_UNIT);
            byte_offset += hthis_offset;
          }
          break;

        case ARRAY_REF:
        case ARRAY_RANGE_REF:
          {
            tree index = TREE_OPERAND (exp, 1);
            tree low_bound, unit_size;

            /* Let the caller fold an SSA-name index to a constant.  */
            if (valueize
                && TREE_CODE (index) == SSA_NAME)
              index = (*valueize) (index);

            /* If the resulting bit-offset is constant, track it.  */
            if (TREE_CODE (index) == INTEGER_CST
                && (low_bound = array_ref_low_bound (exp),
                    TREE_CODE (low_bound) == INTEGER_CST)
                && (unit_size = array_ref_element_size (exp),
                    TREE_CODE (unit_size) == INTEGER_CST))
              {
                HOST_WIDE_INT hindex = TREE_INT_CST_LOW (index);

                /* (index - low_bound) * element_size, in bytes.  */
                hindex -= TREE_INT_CST_LOW (low_bound);
                hindex *= TREE_INT_CST_LOW (unit_size);
                byte_offset += hindex;
              }
            else
              return NULL_TREE;
          }
          break;

        case REALPART_EXPR:
          /* The real part lives at offset zero of the complex value.  */
          break;

        case IMAGPART_EXPR:
          /* The imaginary part follows one component size after.  */
          byte_offset += TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (exp)));
          break;

        case VIEW_CONVERT_EXPR:
          /* A view-convert does not move the access.  */
          break;

        case MEM_REF:
          {
            tree base = TREE_OPERAND (exp, 0);
            if (valueize
                && TREE_CODE (base) == SSA_NAME)
              base = (*valueize) (base);

            /* Hand back the decl for MEM[&decl, off].  */
            if (TREE_CODE (base) == ADDR_EXPR)
              {
                if (!integer_zerop (TREE_OPERAND (exp, 1)))
                  {
                    double_int off = mem_ref_offset (exp);
                    /* The offset must fit a HOST_WIDE_INT.  */
                    gcc_assert (off.high == -1 || off.high == 0);
                    byte_offset += off.to_shwi ();
                  }
                exp = TREE_OPERAND (base, 0);
              }
            goto done;
          }

        case TARGET_MEM_REF:
          {
            tree base = TREE_OPERAND (exp, 0);
            if (valueize
                && TREE_CODE (base) == SSA_NAME)
              base = (*valueize) (base);

            /* Hand back the decl for MEM[&decl, off].  */
            if (TREE_CODE (base) == ADDR_EXPR)
              {
                /* A symbolic index makes the offset non-constant.  */
                if (TMR_INDEX (exp) || TMR_INDEX2 (exp))
                  return NULL_TREE;
                if (!integer_zerop (TMR_OFFSET (exp)))
                  {
                    double_int off = mem_ref_offset (exp);
                    gcc_assert (off.high == -1 || off.high == 0);
                    byte_offset += off.to_shwi ();
                  }
                exp = TREE_OPERAND (base, 0);
              }
            goto done;
          }

        default:
          /* EXP is the ultimate base object.  */
          goto done;
        }

      exp = TREE_OPERAND (exp, 0);
    }
done:

  *poffset = byte_offset;
  return exp;
}
1364
 
1365
#endif /* _TREE_FLOW_INLINE_H  */

powered by: WebSVN 2.1.0

© copyright 1999-2024 OpenCores.org, equivalent to Oliscience, all rights reserved. OpenCores®, registered trademark.