/* Alias analysis for GNU C
   Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006,
   2007 Free Software Foundation, Inc.
   Contributed by John Carr (jfc@mit.edu).

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
23
#include "system.h"
24
#include "coretypes.h"
25
#include "tm.h"
26
#include "rtl.h"
27
#include "tree.h"
28
#include "tm_p.h"
29
#include "function.h"
30
#include "alias.h"
31
#include "emit-rtl.h"
32
#include "regs.h"
33
#include "hard-reg-set.h"
34
#include "basic-block.h"
35
#include "flags.h"
36
#include "output.h"
37
#include "toplev.h"
38
#include "cselib.h"
39
#include "splay-tree.h"
40
#include "ggc.h"
41
#include "langhooks.h"
42
#include "timevar.h"
43
#include "target.h"
44
#include "cgraph.h"
45
#include "varray.h"
46
#include "tree-pass.h"
47
#include "ipa-type-escape.h"
48
 
49
/* The aliasing API provided here solves related but different problems:

   Say there exists (in c)

   struct X {
     struct Y y1;
     struct Z z2;
   } x1, *px1,  *px2;

   struct Y y2, *py;
   struct Z z2, *pz;


   py = &x1.y1;
   px2 = &x1;

   Consider the four questions:

   Can a store to x1 interfere with px2->y1?
   Can a store to x1 interfere with px2->z2 (i.e. (*px2).z2)?
   Can a store to x1 change the value pointed to by py?
   Can a store to x1 change the value pointed to by pz?

   The answer to these questions can be yes, yes, yes, and maybe.

   The first two questions can be answered with a simple examination
   of the type system.  If structure X contains a field of type Y then
   a store thru a pointer to an X can overwrite any field that is
   contained (recursively) in an X (unless we know that px1 != px2).

   The last two questions can be solved in the same way as the first
   two, but this is too conservative.  The observation is that in some
   cases we can know which (if any) fields are addressed and whether
   those addresses are used in bad ways.  This analysis may be
   language specific.  In C, arbitrary operations may be applied to
   pointers.  However, there is some indication that this may be too
   conservative for some C++ types.

   The pass ipa-type-escape does this analysis for the types whose
   instances do not escape across the compilation boundary.

   Historically in GCC, these two problems were combined and a single
   data structure was used to represent the solution to these
   problems.  We now have two similar but different data structures.
   The data structure used to solve the last two questions is similar
   to the first, but does not contain the fields whose addresses are
   never taken.  For types that do escape the compilation unit, the
   data structures will have identical information.
*/

/* The alias sets assigned to MEMs assist the back-end in determining
   which MEMs can alias which other MEMs.  In general, two MEMs in
   different alias sets cannot alias each other, with one important
   exception.  Consider something like:

     struct S { int i; double d; };

   a store to an `S' can alias something of either type `int' or type
   `double'.  (However, a store to an `int' cannot alias a `double'
   and vice versa.)  We indicate this via a tree structure that looks
   like:
           struct S
            /   \
           /     \
         |/_     _\|
         int    double

   (The arrows are directed and point downwards.)
    In this situation we say the alias set for `struct S' is the
   `superset' and that those for `int' and `double' are `subsets'.

   To see whether two alias sets can point to the same memory, we must
   see if either alias set is a subset of the other.  We need not trace
   past immediate descendants, however, since we propagate all
   grandchildren up one level.

   Alias set zero is implicitly a superset of all other alias sets.
   However, there is no actual entry for alias set zero.  It is an
   error to attempt to explicitly construct a subset of zero.  */

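/* For illustration only, with made-up alias set numbers: given

     struct S { int i; double d; };

   get_alias_set might assign set 2 to `struct S', set 3 to `int' and
   set 4 to `double', and record_component_aliases would then record
   sets 3 and 4 as children of set 2.  Afterwards

     alias_sets_conflict_p (2, 3) == 1   (an S contains an int)
     alias_sets_conflict_p (2, 4) == 1   (an S contains a double)
     alias_sets_conflict_p (3, 4) == 0   (int and double are disjoint)

   The actual numbers are unspecified; only the tree shape matters.  */
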
struct alias_set_entry GTY(())
{
  /* The alias set number, as stored in MEM_ALIAS_SET.  */
  HOST_WIDE_INT alias_set;

  /* The children of the alias set.  These are not just the immediate
     children, but, in fact, all descendants.  So, if we have:

       struct T { struct S s; float f; }

     continuing our example above, the children here will be all of
     `int', `double', `float', and `struct S'.  */
  splay_tree GTY((param1_is (int), param2_is (int))) children;

  /* Nonzero if this entry would have a child of zero: this effectively
     makes this alias set the same as alias set zero.  */
  int has_zero_child;
};
typedef struct alias_set_entry *alias_set_entry;

static int rtx_equal_for_memref_p (rtx, rtx);
static rtx find_symbolic_term (rtx);
static int memrefs_conflict_p (int, rtx, int, rtx, HOST_WIDE_INT);
static void record_set (rtx, rtx, void *);
static int base_alias_check (rtx, rtx, enum machine_mode,
                             enum machine_mode);
static rtx find_base_value (rtx);
static int mems_in_disjoint_alias_sets_p (rtx, rtx);
static int insert_subset_children (splay_tree_node, void*);
static tree find_base_decl (tree);
static alias_set_entry get_alias_set_entry (HOST_WIDE_INT);
static rtx fixed_scalar_and_varying_struct_p (rtx, rtx, rtx, rtx,
                                              int (*) (rtx, int));
static int aliases_everything_p (rtx);
static bool nonoverlapping_component_refs_p (tree, tree);
static tree decl_for_component_ref (tree);
static rtx adjust_offset_for_component_ref (tree, rtx);
static int nonoverlapping_memrefs_p (rtx, rtx);
static int write_dependence_p (rtx, rtx, int);

static void memory_modified_1 (rtx, rtx, void *);
static void record_alias_subset (HOST_WIDE_INT, HOST_WIDE_INT);

/* Set up all info needed to perform alias analysis on memory references.  */

/* Returns the size in bytes of the mode of X.  */
#define SIZE_FOR_MODE(X) (GET_MODE_SIZE (GET_MODE (X)))

/* Returns nonzero if MEM1 and MEM2 do not alias because they are in
   different alias sets.  We ignore alias sets in functions making use
   of variable arguments because the va_arg macros on some systems are
   not legal ANSI C.  */
#define DIFFERENT_ALIAS_SETS_P(MEM1, MEM2)                      \
  mems_in_disjoint_alias_sets_p (MEM1, MEM2)

/* Cap the number of passes we make over the insns propagating alias
   information through set chains.  10 is a completely arbitrary choice.  */
#define MAX_ALIAS_LOOP_PASSES 10

/* reg_base_value[N] gives an address to which register N is related.
   If all sets after the first add or subtract to the current value
   or otherwise modify it so it does not point to a different top level
   object, reg_base_value[N] is equal to the address part of the source
   of the first set.

   A base address can be an ADDRESS, SYMBOL_REF, or LABEL_REF.  ADDRESS
   expressions represent certain special values: function arguments and
   the stack, frame, and argument pointers.

   The contents of an ADDRESS are not normally used; the mode of the
   ADDRESS determines whether the ADDRESS is a function argument or some
   other special value.  Pointer equality, not rtx_equal_p, determines whether
   two ADDRESS expressions refer to the same base address.

   The only use of the contents of an ADDRESS is for determining if the
   current function performs nonlocal memory references for the
   purposes of marking the function as a constant function.  */

static GTY(()) VEC(rtx,gc) *reg_base_value;
static rtx *new_reg_base_value;

/* We preserve a copy of the old array around to reduce the amount of
   garbage produced.  About 8% of the garbage produced was attributed
   to this array.  */
static GTY((deletable)) VEC(rtx,gc) *old_reg_base_value;

/* Static hunks of RTL used by the aliasing code; these are initialized
   once per function to avoid unnecessary RTL allocations.  */
static GTY (()) rtx static_reg_base_value[FIRST_PSEUDO_REGISTER];

#define REG_BASE_VALUE(X)                               \
  (REGNO (X) < VEC_length (rtx, reg_base_value)         \
   ? VEC_index (rtx, reg_base_value, REGNO (X)) : 0)

/* Vector indexed by N giving the initial (unchanging) value known for
   pseudo-register N.  This array is initialized in init_alias_analysis,
   and does not change until end_alias_analysis is called.  */
static GTY((length("reg_known_value_size"))) rtx *reg_known_value;

/* Indicates number of valid entries in reg_known_value.  */
static GTY(()) unsigned int reg_known_value_size;

/* Vector recording for each reg_known_value whether it is due to a
   REG_EQUIV note.  Future passes (viz., reload) may replace the
   pseudo with the equivalent expression and so we account for the
   dependences that would be introduced if that happens.

   The REG_EQUIV notes created in assign_parms may mention the arg
   pointer, and there are explicit insns in the RTL that modify the
   arg pointer.  Thus we must ensure that such insns don't get
   scheduled across each other because that would invalidate the
   REG_EQUIV notes.  One could argue that the REG_EQUIV notes are
   wrong, but solving the problem in the scheduler will likely give
   better code, so we do it here.  */
static bool *reg_known_equiv_p;

/* True when scanning insns from the start of the rtl to the
   NOTE_INSN_FUNCTION_BEG note.  */
static bool copying_arguments;

DEF_VEC_P(alias_set_entry);
DEF_VEC_ALLOC_P(alias_set_entry,gc);

/* The vector used to store the various alias set entries.  */
static GTY (()) VEC(alias_set_entry,gc) *alias_sets;

/* Returns a pointer to the alias set entry for ALIAS_SET, if there is
   such an entry, or NULL otherwise.  */

static inline alias_set_entry
get_alias_set_entry (HOST_WIDE_INT alias_set)
{
  return VEC_index (alias_set_entry, alias_sets, alias_set);
}

/* Returns nonzero if the alias sets for MEM1 and MEM2 are such that
   the two MEMs cannot alias each other.  */

static inline int
mems_in_disjoint_alias_sets_p (rtx mem1, rtx mem2)
{
/* Perform a basic sanity check.  Namely, that there are no alias sets
   if we're not using strict aliasing.  This helps to catch bugs
   whereby someone uses PUT_CODE, but doesn't clear MEM_ALIAS_SET, or
   where a MEM is allocated in some way other than by the use of
   gen_rtx_MEM, and the MEM_ALIAS_SET is not cleared.  If we begin to
   use alias sets to indicate that spilled registers cannot alias each
   other, we might need to remove this check.  */
  gcc_assert (flag_strict_aliasing
              || (!MEM_ALIAS_SET (mem1) && !MEM_ALIAS_SET (mem2)));

  return ! alias_sets_conflict_p (MEM_ALIAS_SET (mem1), MEM_ALIAS_SET (mem2));
}

/* Insert the NODE into the splay tree given by DATA.  Used by
   record_alias_subset via splay_tree_foreach.  */

static int
insert_subset_children (splay_tree_node node, void *data)
{
  splay_tree_insert ((splay_tree) data, node->key, node->value);

  return 0;
}

/* Return 1 if the two specified alias sets may conflict.  */

int
alias_sets_conflict_p (HOST_WIDE_INT set1, HOST_WIDE_INT set2)
{
  alias_set_entry ase;

  /* If we have no alias set information for one of the operands, we
     have to assume it can alias anything.  */
  if (set1 == 0 || set2 == 0
      /* If the two alias sets are the same, they may alias.  */
      || set1 == set2)
    return 1;

  /* See if the first alias set is a subset of the second.  */
  ase = get_alias_set_entry (set1);
  if (ase != 0
      && (ase->has_zero_child
          || splay_tree_lookup (ase->children,
                                (splay_tree_key) set2)))
    return 1;

  /* Now do the same, but with the alias sets reversed.  */
  ase = get_alias_set_entry (set2);
  if (ase != 0
      && (ase->has_zero_child
          || splay_tree_lookup (ase->children,
                                (splay_tree_key) set1)))
    return 1;

  /* The two alias sets are distinct and neither one is the
     child of the other.  Therefore, they cannot alias.  */
  return 0;
}

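/* Note that the subset relation is recorded only in the superset's
   entry (see record_alias_subset): an entry knows its descendants but
   not its ancestors.  That is why the lookup above must be performed
   in both directions.  */
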
/* Return 1 if the two specified alias sets might conflict, or if any subtype
   of these alias sets might conflict.  */

int
alias_sets_might_conflict_p (HOST_WIDE_INT set1, HOST_WIDE_INT set2)
{
  if (set1 == 0 || set2 == 0 || set1 == set2)
    return 1;

  return 0;
}


/* Return 1 if any MEM object of type T1 will always conflict (using the
   dependency routines in this file) with any MEM object of type T2.
   This is used when allocating temporary storage.  If T1 and/or T2 are
   NULL_TREE, it means we know nothing about the storage.  */

int
objects_must_conflict_p (tree t1, tree t2)
{
  HOST_WIDE_INT set1, set2;

  /* If neither has a type specified, we don't know if they'll conflict
     because we may be using them to store objects of various types, for
     example the argument and local variables areas of inlined functions.  */
  if (t1 == 0 && t2 == 0)
    return 0;

  /* If they are the same type, they must conflict.  */
  if (t1 == t2
      /* Likewise if both are volatile.  */
      || (t1 != 0 && TYPE_VOLATILE (t1) && t2 != 0 && TYPE_VOLATILE (t2)))
    return 1;

  set1 = t1 ? get_alias_set (t1) : 0;
  set2 = t2 ? get_alias_set (t2) : 0;

  /* Otherwise they conflict if they have no alias set or the same one.
     We can't simply use alias_sets_conflict_p here, because we must
     make sure that every subtype of t1 will conflict with every
     subtype of t2 for which a pair of subobjects of these respective
     subtypes overlaps on the stack.  */
  return set1 == 0 || set2 == 0 || set1 == set2;
}

/* T is an expression with pointer type.  Find the DECL on which this
   expression is based.  (For example, in `a[i]' this would be `a'.)
   If there is no such DECL, or a unique decl cannot be determined,
   NULL_TREE is returned.  */

static tree
find_base_decl (tree t)
{
  tree d0, d1;

  if (t == 0 || t == error_mark_node || ! POINTER_TYPE_P (TREE_TYPE (t)))
    return 0;

  /* If this is a declaration, return it.  If T is based on a restrict
     qualified decl, return that decl.  */
  if (DECL_P (t))
    {
      if (TREE_CODE (t) == VAR_DECL && DECL_BASED_ON_RESTRICT_P (t))
        t = DECL_GET_RESTRICT_BASE (t);
      return t;
    }

  /* Handle general expressions.  It would be nice to deal with
     COMPONENT_REFs here.  If we could tell that `a' and `b' were the
     same, then `a->f' and `b->f' are also the same.  */
  switch (TREE_CODE_CLASS (TREE_CODE (t)))
    {
    case tcc_unary:
      return find_base_decl (TREE_OPERAND (t, 0));

    case tcc_binary:
      /* Return 0 if a decl is found in neither operand or if the two
         decls found differ; otherwise return the decl that was found.  */
      d0 = find_base_decl (TREE_OPERAND (t, 0));
      d1 = find_base_decl (TREE_OPERAND (t, 1));
      if (d0 == d1)
        return d0;
      else if (d0 == 0)
        return d1;
      else if (d1 == 0)
        return d0;
      else
        return 0;

    default:
      return 0;
    }
}

/* Return true if all nested component references handled by
   get_inner_reference in T are such that we should use the alias set
   provided by the object at the heart of T.

   This is true for non-addressable components (which don't have their
   own alias set), as well as components of objects in alias set zero.
   This latter point is a special case wherein we wish to override the
   alias set used by the component, but we don't have per-FIELD_DECL
   assignable alias sets.  */

bool
component_uses_parent_alias_set (tree t)
{
  while (1)
    {
      /* If we're at the end, it vacuously uses its own alias set.  */
      if (!handled_component_p (t))
        return false;

      switch (TREE_CODE (t))
        {
        case COMPONENT_REF:
          if (DECL_NONADDRESSABLE_P (TREE_OPERAND (t, 1)))
            return true;
          break;

        case ARRAY_REF:
        case ARRAY_RANGE_REF:
          if (TYPE_NONALIASED_COMPONENT (TREE_TYPE (TREE_OPERAND (t, 0))))
            return true;
          break;

        case REALPART_EXPR:
        case IMAGPART_EXPR:
          break;

        default:
          /* Bitfields and casts are never addressable.  */
          return true;
        }

      t = TREE_OPERAND (t, 0);
      if (get_alias_set (TREE_TYPE (t)) == 0)
        return true;
    }
}

/* Return the alias set for T, which may be either a type or an
   expression.  Call language-specific routine for help, if needed.  */

HOST_WIDE_INT
get_alias_set (tree t)
{
  HOST_WIDE_INT set;

  /* If we're not doing any alias analysis, just assume everything
     aliases everything else.  Also return 0 if this or its type is
     an error.  */
  if (! flag_strict_aliasing || t == error_mark_node
      || (! TYPE_P (t)
          && (TREE_TYPE (t) == 0 || TREE_TYPE (t) == error_mark_node)))
    return 0;

  /* We can be passed either an expression or a type.  This and the
     language-specific routine may make mutually-recursive calls to each other
     to figure out what to do.  At each juncture, we see if this is a tree
     that the language may need to handle specially.  First handle things that
     aren't types.  */
  if (! TYPE_P (t))
    {
      tree inner = t;

      /* Remove any nops, then give the language a chance to do
         something with this tree before we look at it.  */
      STRIP_NOPS (t);
      set = lang_hooks.get_alias_set (t);
      if (set != -1)
        return set;

      /* First see if the actual object referenced is an INDIRECT_REF from a
         restrict-qualified pointer or a "void *".  */
      while (handled_component_p (inner))
        {
          inner = TREE_OPERAND (inner, 0);
          STRIP_NOPS (inner);
        }

      /* Check for accesses through restrict-qualified pointers.  */
      if (INDIRECT_REF_P (inner))
        {
          tree decl = find_base_decl (TREE_OPERAND (inner, 0));

          if (decl && DECL_POINTER_ALIAS_SET_KNOWN_P (decl))
            {
              /* If we haven't computed the actual alias set, do it now.  */
              if (DECL_POINTER_ALIAS_SET (decl) == -2)
                {
                  tree pointed_to_type = TREE_TYPE (TREE_TYPE (decl));

                  /* No two restricted pointers can point at the same thing.
                     However, a restricted pointer can point at the same thing
                     as an unrestricted pointer, if that unrestricted pointer
                     is based on the restricted pointer.  So, we make the
                     alias set for the restricted pointer a subset of the
                     alias set for the type pointed to by the type of the
                     decl.  */
                  HOST_WIDE_INT pointed_to_alias_set
                    = get_alias_set (pointed_to_type);

                  if (pointed_to_alias_set == 0)
                    /* It's not legal to make a subset of alias set zero.  */
                    DECL_POINTER_ALIAS_SET (decl) = 0;
                  else if (AGGREGATE_TYPE_P (pointed_to_type))
                    /* For an aggregate, we must treat the restricted
                       pointer the same as an ordinary pointer.  If we
                       were to make the type pointed to by the
                       restricted pointer a subset of the pointed-to
                       type, then we would believe that other subsets
                       of the pointed-to type (such as fields of that
                       type) do not conflict with the type pointed to
                       by the restricted pointer.  */
                    DECL_POINTER_ALIAS_SET (decl)
                      = pointed_to_alias_set;
                  else
                    {
                      DECL_POINTER_ALIAS_SET (decl) = new_alias_set ();
                      record_alias_subset (pointed_to_alias_set,
                                           DECL_POINTER_ALIAS_SET (decl));
                    }
                }

              /* We use the alias set indicated in the declaration.  */
              return DECL_POINTER_ALIAS_SET (decl);
            }

          /* If we have an INDIRECT_REF via a void pointer, we don't
             know anything about what that might alias.  Likewise if the
             pointer is marked that way.  */
          else if (TREE_CODE (TREE_TYPE (inner)) == VOID_TYPE
                   || (TYPE_REF_CAN_ALIAS_ALL
                       (TREE_TYPE (TREE_OPERAND (inner, 0)))))
            return 0;
        }

      /* Otherwise, pick up the outermost object that we could have a pointer
         to, processing conversions as above.  */
      while (component_uses_parent_alias_set (t))
        {
          t = TREE_OPERAND (t, 0);
          STRIP_NOPS (t);
        }

      /* If we've already determined the alias set for a decl, just return
         it.  This is necessary for C++ anonymous unions, whose component
         variables don't look like union members (boo!).  */
      if (TREE_CODE (t) == VAR_DECL
          && DECL_RTL_SET_P (t) && MEM_P (DECL_RTL (t)))
        return MEM_ALIAS_SET (DECL_RTL (t));

      /* Now all we care about is the type.  */
      t = TREE_TYPE (t);
    }

  /* Variant qualifiers don't affect the alias set, so get the main
     variant.  If this is a type with a known alias set, return it.  */
  t = TYPE_MAIN_VARIANT (t);
  if (TYPE_ALIAS_SET_KNOWN_P (t))
    return TYPE_ALIAS_SET (t);

  /* See if the language has special handling for this type.  */
  set = lang_hooks.get_alias_set (t);
  if (set != -1)
    return set;

  /* There are no objects of FUNCTION_TYPE, so there's no point in
     using up an alias set for them.  (There are, of course, pointers
     and references to functions, but that's different.)  */
  else if (TREE_CODE (t) == FUNCTION_TYPE)
    set = 0;

  /* Unless the language specifies otherwise, let vector types alias
     their components.  This avoids some nasty type punning issues in
     normal usage.  And indeed lets vectors be treated more like an
     array slice.  */
  else if (TREE_CODE (t) == VECTOR_TYPE)
    set = get_alias_set (TREE_TYPE (t));

  else
    /* Otherwise make a new alias set for this type.  */
    set = new_alias_set ();

  TYPE_ALIAS_SET (t) = set;

  /* If this is an aggregate type, we must record any component aliasing
     information.  */
  if (AGGREGATE_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
    record_component_aliases (t);

  return set;
}

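/* Roughly, and with made-up numbers again: given

     struct S { int i; double d; } *p;

   get_alias_set on the expression `p->d' yields the alias set of
   `double', while get_alias_set on `*p' yields the alias set of
   `struct S'; the former was recorded as a subset of the latter by
   record_component_aliases below.  */
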
/* Return a brand-new alias set.  */

HOST_WIDE_INT
new_alias_set (void)
{
  if (flag_strict_aliasing)
    {
      if (alias_sets == 0)
        VEC_safe_push (alias_set_entry, gc, alias_sets, 0);
      VEC_safe_push (alias_set_entry, gc, alias_sets, 0);
      return VEC_length (alias_set_entry, alias_sets) - 1;
    }
  else
    return 0;
}

/* Indicate that things in SUBSET can alias things in SUPERSET, but that
   not everything that aliases SUPERSET also aliases SUBSET.  For example,
   in C, a store to an `int' can alias a load of a structure containing an
   `int', and vice versa.  But it can't alias a load of a 'double' member
   of the same structure.  Here, the structure would be the SUPERSET and
   `int' the SUBSET.  This relationship is also described in the comment at
   the beginning of this file.

   This function should be called only once per SUPERSET/SUBSET pair.

   It is illegal for SUPERSET to be zero; everything is implicitly a
   subset of alias set zero.  */

static void
record_alias_subset (HOST_WIDE_INT superset, HOST_WIDE_INT subset)
{
  alias_set_entry superset_entry;
  alias_set_entry subset_entry;

  /* It is possible in complex type situations for both sets to be the same,
     in which case we can ignore this operation.  */
  if (superset == subset)
    return;

  gcc_assert (superset);

  superset_entry = get_alias_set_entry (superset);
  if (superset_entry == 0)
    {
      /* Create an entry for the SUPERSET, so that we have a place to
         attach the SUBSET.  */
      superset_entry = ggc_alloc (sizeof (struct alias_set_entry));
      superset_entry->alias_set = superset;
      superset_entry->children
        = splay_tree_new_ggc (splay_tree_compare_ints);
      superset_entry->has_zero_child = 0;
      VEC_replace (alias_set_entry, alias_sets, superset, superset_entry);
    }

  if (subset == 0)
    superset_entry->has_zero_child = 1;
  else
    {
      subset_entry = get_alias_set_entry (subset);
      /* If there is an entry for the subset, enter all of its children
         (if they are not already present) as children of the SUPERSET.  */
      if (subset_entry)
        {
          if (subset_entry->has_zero_child)
            superset_entry->has_zero_child = 1;

          splay_tree_foreach (subset_entry->children, insert_subset_children,
                              superset_entry->children);
        }

      /* Enter the SUBSET itself as a child of the SUPERSET.  */
      splay_tree_insert (superset_entry->children,
                         (splay_tree_key) subset, 0);
    }
}

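/* Continuing the `struct S' example from above: for

     struct T { struct S s; float f; };

   record_component_aliases calls record_alias_subset (T, S) and
   record_alias_subset (T, float).  Since insert_subset_children
   copies S's children into T's entry, T's children splay tree ends up
   holding S, float, int and double, which is what allows
   alias_sets_conflict_p to stop at immediate children.  */
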
/* Record that component types of TYPE, if any, are part of that type for
   aliasing purposes.  For record types, we only record component types
   for fields that are marked addressable.  For array types, we always
   record the component types, so the front end should not call this
   function if the individual components aren't addressable.  */

void
record_component_aliases (tree type)
{
  HOST_WIDE_INT superset = get_alias_set (type);
  tree field;

  if (superset == 0)
    return;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      if (! TYPE_NONALIASED_COMPONENT (type))
        record_alias_subset (superset, get_alias_set (TREE_TYPE (type)));
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      /* Recursively record aliases for the base classes, if there are any.  */
      if (TYPE_BINFO (type))
        {
          int i;
          tree binfo, base_binfo;

          for (binfo = TYPE_BINFO (type), i = 0;
               BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
            record_alias_subset (superset,
                                 get_alias_set (BINFO_TYPE (base_binfo)));
        }
      for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
        if (TREE_CODE (field) == FIELD_DECL && ! DECL_NONADDRESSABLE_P (field))
          record_alias_subset (superset, get_alias_set (TREE_TYPE (field)));
      break;

    case COMPLEX_TYPE:
      record_alias_subset (superset, get_alias_set (TREE_TYPE (type)));
      break;

    default:
      break;
    }
}

/* Allocate an alias set for use in storing and reading from the varargs
   spill area.  */

static GTY(()) HOST_WIDE_INT varargs_set = -1;

HOST_WIDE_INT
get_varargs_alias_set (void)
{
#if 1
  /* We now lower VA_ARG_EXPR, and there's currently no way to attach the
     varargs alias set to an INDIRECT_REF (FIXME!), so we can't
     consistently use the varargs alias set for loads from the varargs
     area.  So don't use it anywhere.  */
  return 0;
#else
  if (varargs_set == -1)
    varargs_set = new_alias_set ();

  return varargs_set;
#endif
}

/* Likewise, but used for the fixed portions of the frame, e.g., register
   save areas.  */

static GTY(()) HOST_WIDE_INT frame_set = -1;

HOST_WIDE_INT
get_frame_alias_set (void)
{
  if (frame_set == -1)
    frame_set = new_alias_set ();

  return frame_set;
}

/* Inside SRC, the source of a SET, find a base address.  */

static rtx
find_base_value (rtx src)
{
  unsigned int regno;

  switch (GET_CODE (src))
    {
    case SYMBOL_REF:
    case LABEL_REF:
      return src;

    case REG:
      regno = REGNO (src);
      /* At the start of a function, argument registers have known base
         values which may be lost later.  Returning an ADDRESS
         expression here allows optimization based on argument values
         even when the argument registers are used for other purposes.  */
      if (regno < FIRST_PSEUDO_REGISTER && copying_arguments)
        return new_reg_base_value[regno];

      /* If a pseudo has a known base value, return it.  Do not do this
         for non-fixed hard regs since it can result in a circular
         dependency chain for registers which have values at function entry.

         The test above is not sufficient because the scheduler may move
         a copy out of an arg reg past the NOTE_INSN_FUNCTION_BEGIN.  */
      if ((regno >= FIRST_PSEUDO_REGISTER || fixed_regs[regno])
          && regno < VEC_length (rtx, reg_base_value))
        {
          /* If we're inside init_alias_analysis, use new_reg_base_value
             to reduce the number of relaxation iterations.  */
          if (new_reg_base_value && new_reg_base_value[regno]
              && REG_N_SETS (regno) == 1)
            return new_reg_base_value[regno];

          if (VEC_index (rtx, reg_base_value, regno))
            return VEC_index (rtx, reg_base_value, regno);
        }

      return 0;

    case MEM:
      /* Check for an argument passed in memory.  Only record in the
         copying-arguments block; it is too hard to track changes
         otherwise.  */
      if (copying_arguments
          && (XEXP (src, 0) == arg_pointer_rtx
              || (GET_CODE (XEXP (src, 0)) == PLUS
                  && XEXP (XEXP (src, 0), 0) == arg_pointer_rtx)))
        return gen_rtx_ADDRESS (VOIDmode, src);
      return 0;

    case CONST:
      src = XEXP (src, 0);
      if (GET_CODE (src) != PLUS && GET_CODE (src) != MINUS)
        break;

      /* ... fall through ...  */

    case PLUS:
    case MINUS:
      {
        rtx temp, src_0 = XEXP (src, 0), src_1 = XEXP (src, 1);

        /* If either operand is a REG that is a known pointer, then it
           is the base.  */
        if (REG_P (src_0) && REG_POINTER (src_0))
          return find_base_value (src_0);
        if (REG_P (src_1) && REG_POINTER (src_1))
          return find_base_value (src_1);

        /* If either operand is a REG, then see if we already have
           a known value for it.  */
        if (REG_P (src_0))
          {
            temp = find_base_value (src_0);
            if (temp != 0)
              src_0 = temp;
          }

        if (REG_P (src_1))
          {
            temp = find_base_value (src_1);
            if (temp != 0)
              src_1 = temp;
          }

        /* If either base is a named object or a special address
           (like an argument or stack reference), then use it for the
           base term.  */
        if (src_0 != 0
            && (GET_CODE (src_0) == SYMBOL_REF
                || GET_CODE (src_0) == LABEL_REF
                || (GET_CODE (src_0) == ADDRESS
                    && GET_MODE (src_0) != VOIDmode)))
          return src_0;

        if (src_1 != 0
            && (GET_CODE (src_1) == SYMBOL_REF
                || GET_CODE (src_1) == LABEL_REF
                || (GET_CODE (src_1) == ADDRESS
                    && GET_MODE (src_1) != VOIDmode)))
          return src_1;

        /* Guess which operand is the base address:
           If either operand is a symbol, then it is the base.  If
           either operand is a CONST_INT, then the other is the base.  */
        if (GET_CODE (src_1) == CONST_INT || CONSTANT_P (src_0))
          return find_base_value (src_0);
        else if (GET_CODE (src_0) == CONST_INT || CONSTANT_P (src_1))
          return find_base_value (src_1);

        return 0;
      }

    case LO_SUM:
      /* The standard form is (lo_sum reg sym) so look only at the
         second operand.  */
      return find_base_value (XEXP (src, 1));

    case AND:
      /* If the second operand is a constant, set the base
         address to the first operand.  */
      if (GET_CODE (XEXP (src, 1)) == CONST_INT && INTVAL (XEXP (src, 1)) != 0)
        return find_base_value (XEXP (src, 0));
      return 0;

    case TRUNCATE:
      if (GET_MODE_SIZE (GET_MODE (src)) < GET_MODE_SIZE (Pmode))
        break;
      /* Fall through.  */
    case HIGH:
    case PRE_INC:
    case PRE_DEC:
    case POST_INC:
    case POST_DEC:
    case PRE_MODIFY:
    case POST_MODIFY:
      return find_base_value (XEXP (src, 0));

    case ZERO_EXTEND:
    case SIGN_EXTEND:   /* used for NT/Alpha pointers */
      {
        rtx temp = find_base_value (XEXP (src, 0));

        if (temp != 0 && CONSTANT_P (temp))
          temp = convert_memory_address (Pmode, temp);

        return temp;
      }

    default:
      break;
    }

  return 0;
}

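/* As an illustration of the PLUS handling above: for a source like

     (plus:SI (reg:SI 100) (const_int 8))

   neither operand is a symbol or a known ADDRESS, but the CONST_INT
   cannot be the base, so the result is find_base_value of register
   100, e.g. an ADDRESS for the stack or a SYMBOL_REF.  */
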
/* Called from init_alias_analysis indirectly through note_stores.  */

/* While scanning insns to find base values, reg_seen[N] is nonzero if
   register N has been set in this function.  */
static char *reg_seen;

/* Addresses which are known not to alias anything else are identified
   by a unique integer.  */
static int unique_id;

static void
record_set (rtx dest, rtx set, void *data ATTRIBUTE_UNUSED)
{
  unsigned regno;
  rtx src;
  int n;

  if (!REG_P (dest))
    return;

  regno = REGNO (dest);

  gcc_assert (regno < VEC_length (rtx, reg_base_value));

  /* If this spans multiple hard registers, then we must indicate that every
     register has an unusable value.  */
  if (regno < FIRST_PSEUDO_REGISTER)
    n = hard_regno_nregs[regno][GET_MODE (dest)];
  else
    n = 1;
  if (n != 1)
    {
      while (--n >= 0)
        {
          reg_seen[regno + n] = 1;
          new_reg_base_value[regno + n] = 0;
        }
      return;
    }

  if (set)
    {
      /* A CLOBBER wipes out any old value but does not prevent a previously
         unset register from acquiring a base address (i.e. reg_seen is not
         set).  */
      if (GET_CODE (set) == CLOBBER)
        {
          new_reg_base_value[regno] = 0;
          return;
        }
      src = SET_SRC (set);
    }
  else
    {
      if (reg_seen[regno])
        {
          new_reg_base_value[regno] = 0;
          return;
        }
      reg_seen[regno] = 1;
      new_reg_base_value[regno] = gen_rtx_ADDRESS (Pmode,
                                                   GEN_INT (unique_id++));
      return;
    }

  /* If this is not the first set of REGNO, see whether the new value
     is related to the old one.  There are two cases of interest:

        (1) The register might be assigned an entirely new value
            that has the same base term as the original set.

        (2) The set might be a simple self-modification that
            cannot change REGNO's base value.

     If neither case holds, reject the original base value as invalid.
     Note that the following situation is not detected:

         extern int x, y;  int *p = &x; p += (&y-&x);

     ANSI C does not allow computing the difference of addresses
     of distinct top level objects.  */
  if (new_reg_base_value[regno] != 0
      && find_base_value (src) != new_reg_base_value[regno])
    switch (GET_CODE (src))
      {
      case LO_SUM:
      case MINUS:
        if (XEXP (src, 0) != dest && XEXP (src, 1) != dest)
          new_reg_base_value[regno] = 0;
        break;
      case PLUS:
        /* If the value we add in the PLUS is also a valid base value,
           this might be the actual base value, and the original value
           an index.  */
        {
          rtx other = NULL_RTX;

          if (XEXP (src, 0) == dest)
            other = XEXP (src, 1);
          else if (XEXP (src, 1) == dest)
            other = XEXP (src, 0);

          if (! other || find_base_value (other))
            new_reg_base_value[regno] = 0;
          break;
        }
      case AND:
        if (XEXP (src, 0) != dest || GET_CODE (XEXP (src, 1)) != CONST_INT)
          new_reg_base_value[regno] = 0;
        break;
      default:
        new_reg_base_value[regno] = 0;
        break;
      }
  /* If this is the first set of a register, record the value.  */
  else if ((regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
           && ! reg_seen[regno] && new_reg_base_value[regno] == 0)
    new_reg_base_value[regno] = find_base_value (src);

  reg_seen[regno] = 1;
}

/* Clear alias info for a register.  This is used when an RTL
   transformation changes the value of a register, e.g. by flow's
   AUTO_INC_DEC optimizations.  We don't need to clear reg_base_value,
   since flow only changes the offset.  */

void
clear_reg_alias_info (rtx reg)
{
  unsigned int regno = REGNO (reg);

  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      regno -= FIRST_PSEUDO_REGISTER;
      if (regno < reg_known_value_size)
        {
          reg_known_value[regno] = reg;
          reg_known_equiv_p[regno] = false;
        }
    }
}

/* If a value is known for REGNO, return it.  */

rtx
get_reg_known_value (unsigned int regno)
{
  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      regno -= FIRST_PSEUDO_REGISTER;
      if (regno < reg_known_value_size)
        return reg_known_value[regno];
    }
  return NULL;
}

/* Set it.  */

static void
set_reg_known_value (unsigned int regno, rtx val)
{
  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      regno -= FIRST_PSEUDO_REGISTER;
      if (regno < reg_known_value_size)
        reg_known_value[regno] = val;
    }
}

/* Similarly for reg_known_equiv_p.  */

bool
get_reg_known_equiv_p (unsigned int regno)
{
  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      regno -= FIRST_PSEUDO_REGISTER;
      if (regno < reg_known_value_size)
        return reg_known_equiv_p[regno];
    }
  return false;
}

static void
set_reg_known_equiv_p (unsigned int regno, bool val)
{
  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      regno -= FIRST_PSEUDO_REGISTER;
      if (regno < reg_known_value_size)
        reg_known_equiv_p[regno] = val;
    }
}


/* Returns a canonical version of X, from the point of view of alias
   analysis.  (For example, if X is a MEM whose address is a register,
   and the register has a known value (say a SYMBOL_REF), then a MEM
   whose address is the SYMBOL_REF is returned.)  */

rtx
canon_rtx (rtx x)
{
  /* Recursively look for equivalences.  */
  if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER)
    {
      rtx t = get_reg_known_value (REGNO (x));
      if (t == x)
        return x;
      if (t)
        return canon_rtx (t);
    }

  if (GET_CODE (x) == PLUS)
    {
      rtx x0 = canon_rtx (XEXP (x, 0));
      rtx x1 = canon_rtx (XEXP (x, 1));

      if (x0 != XEXP (x, 0) || x1 != XEXP (x, 1))
        {
          if (GET_CODE (x0) == CONST_INT)
            return plus_constant (x1, INTVAL (x0));
          else if (GET_CODE (x1) == CONST_INT)
            return plus_constant (x0, INTVAL (x1));
          return gen_rtx_PLUS (GET_MODE (x), x0, x1);
        }
    }

  /* This gives us much better alias analysis when called from
     the loop optimizer.  Note we want to leave the original
     MEM alone, but need to return the canonicalized MEM with
     all the flags at their original values.  */
  else if (MEM_P (x))
    x = replace_equiv_address_nv (x, canon_rtx (XEXP (x, 0)));

  return x;
}

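/* For example, if reg_known_value records that pseudo 101 always
   holds (symbol_ref "x"), then

     canon_rtx ((mem:SI (reg:SI 101)))

   returns a MEM whose address is (symbol_ref "x"); the original MEM
   is left untouched because replace_equiv_address_nv builds a new
   MEM rather than modifying its argument.  */
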
/* Return 1 if X and Y are identical-looking rtx's.
   Expect that X and Y have already been canonicalized.

   We use the data in reg_known_value above to see if two registers with
   different numbers are, in fact, equivalent.  */

static int
rtx_equal_for_memref_p (rtx x, rtx y)
{
  int i;
  int j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0 && y == 0)
    return 1;
  if (x == 0 || y == 0)
    return 0;

  if (x == y)
    return 1;

  code = GET_CODE (x);
  /* Rtx's of different codes cannot be equal.  */
  if (code != GET_CODE (y))
    return 0;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
     (REG:SI x) and (REG:HI x) are NOT equivalent.  */

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* Some RTL can be compared without a recursive examination.  */
  switch (code)
    {
    case REG:
      return REGNO (x) == REGNO (y);

    case LABEL_REF:
      return XEXP (x, 0) == XEXP (y, 0);

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    case VALUE:
    case CONST_INT:
    case CONST_DOUBLE:
      /* There's no need to compare the contents of CONST_DOUBLEs or
         CONST_INTs because pointer equality is a good enough
         comparison for these nodes.  */
      return 0;

    default:
      break;
    }

  /* canon_rtx knows how to handle plus.  No need to canonicalize.  */
  if (code == PLUS)
    return ((rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0))
             && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 1)))
            || (rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 1))
                && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 0))));
  /* For commutative operations, the RTXs match if the operands match in
     either order.  Also handle the simple binary and unary cases without
     a loop.  */
  if (COMMUTATIVE_P (x))
    {
      rtx xop0 = canon_rtx (XEXP (x, 0));
      rtx yop0 = canon_rtx (XEXP (y, 0));
      rtx yop1 = canon_rtx (XEXP (y, 1));

      return ((rtx_equal_for_memref_p (xop0, yop0)
               && rtx_equal_for_memref_p (canon_rtx (XEXP (x, 1)), yop1))
              || (rtx_equal_for_memref_p (xop0, yop1)
                  && rtx_equal_for_memref_p (canon_rtx (XEXP (x, 1)), yop0)));
    }
  else if (NON_COMMUTATIVE_P (x))
    {
      return (rtx_equal_for_memref_p (canon_rtx (XEXP (x, 0)),
                                      canon_rtx (XEXP (y, 0)))
              && rtx_equal_for_memref_p (canon_rtx (XEXP (x, 1)),
                                         canon_rtx (XEXP (y, 1))));
    }
  else if (UNARY_P (x))
    return rtx_equal_for_memref_p (canon_rtx (XEXP (x, 0)),
                                   canon_rtx (XEXP (y, 0)));

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole thing.

     Limit cases to types which actually appear in addresses.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      switch (fmt[i])
        {
        case 'i':
          if (XINT (x, i) != XINT (y, i))
            return 0;
          break;

        case 'E':
          /* Two vectors must have the same length.  */
          if (XVECLEN (x, i) != XVECLEN (y, i))
            return 0;

          /* And the corresponding elements must match.  */
          for (j = 0; j < XVECLEN (x, i); j++)
            if (rtx_equal_for_memref_p (canon_rtx (XVECEXP (x, i, j)),
                                        canon_rtx (XVECEXP (y, i, j))) == 0)
              return 0;
          break;

        case 'e':
          if (rtx_equal_for_memref_p (canon_rtx (XEXP (x, i)),
                                      canon_rtx (XEXP (y, i))) == 0)
            return 0;
          break;

          /* This can happen for asm operands.  */
        case 's':
          if (strcmp (XSTR (x, i), XSTR (y, i)))
            return 0;
          break;

        /* This can happen for an asm which clobbers memory.  */
        case '0':
          break;

          /* It is believed that rtx's at this level will never
             contain anything but integers and other rtx's,
             except for within LABEL_REFs and SYMBOL_REFs.  */
        default:
          gcc_unreachable ();
        }
    }
  return 1;
}

/* Given an rtx X, find a SYMBOL_REF or LABEL_REF within
   X and return it, or return 0 if none found.  */

static rtx
find_symbolic_term (rtx x)
{
  int i;
  enum rtx_code code;
  const char *fmt;

  code = GET_CODE (x);
  if (code == SYMBOL_REF || code == LABEL_REF)
    return x;
  if (OBJECT_P (x))
    return 0;

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      rtx t;

      if (fmt[i] == 'e')
        {
          t = find_symbolic_term (XEXP (x, i));
          if (t != 0)
            return t;
        }
      else if (fmt[i] == 'E')
        break;
    }
  return 0;
}

rtx
find_base_term (rtx x)
{
  cselib_val *val;
  struct elt_loc_list *l;

#if defined (FIND_BASE_TERM)
  /* Try machine-dependent ways to find the base term.  */
  x = FIND_BASE_TERM (x);
#endif

  switch (GET_CODE (x))
    {
    case REG:
      return REG_BASE_VALUE (x);

    case TRUNCATE:
      if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (Pmode))
        return 0;
      /* Fall through.  */
    case HIGH:
    case PRE_INC:
    case PRE_DEC:
    case POST_INC:
    case POST_DEC:
    case PRE_MODIFY:
    case POST_MODIFY:
      return find_base_term (XEXP (x, 0));

    case ZERO_EXTEND:
    case SIGN_EXTEND:   /* Used for Alpha/NT pointers */
      {
        rtx temp = find_base_term (XEXP (x, 0));

        if (temp != 0 && CONSTANT_P (temp))
          temp = convert_memory_address (Pmode, temp);

        return temp;
      }

    case VALUE:
      val = CSELIB_VAL_PTR (x);
      if (!val)
        return 0;
      for (l = val->locs; l; l = l->next)
        if ((x = find_base_term (l->loc)) != 0)
          return x;
      return 0;

    case CONST:
      x = XEXP (x, 0);
      if (GET_CODE (x) != PLUS && GET_CODE (x) != MINUS)
        return 0;
      /* Fall through.  */
    case LO_SUM:
    case PLUS:
    case MINUS:
      {
        rtx tmp1 = XEXP (x, 0);
        rtx tmp2 = XEXP (x, 1);

        /* This is a little bit tricky since we have to determine which of
           the two operands represents the real base address.  Otherwise this
           routine may return the index register instead of the base register.

           That may cause us to believe no aliasing was possible, when in
           fact aliasing is possible.

           We use a few simple tests to guess the base register.  Additional
           tests can certainly be added.  For example, if one of the operands
           is a shift or multiply, then it must be the index register and the
           other operand is the base register.  */

        if (tmp1 == pic_offset_table_rtx && CONSTANT_P (tmp2))
          return find_base_term (tmp2);

        /* If either operand is known to be a pointer, then use it
           to determine the base term.  */
        if (REG_P (tmp1) && REG_POINTER (tmp1))
          return find_base_term (tmp1);

        if (REG_P (tmp2) && REG_POINTER (tmp2))
          return find_base_term (tmp2);

        /* Neither operand was known to be a pointer.  Go ahead and find the
           base term for both operands.  */
        tmp1 = find_base_term (tmp1);
        tmp2 = find_base_term (tmp2);

        /* If either base term is a named object or a special address
           (like an argument or stack reference), then use it for the
           base term.  */
        if (tmp1 != 0
            && (GET_CODE (tmp1) == SYMBOL_REF
                || GET_CODE (tmp1) == LABEL_REF
                || (GET_CODE (tmp1) == ADDRESS
                    && GET_MODE (tmp1) != VOIDmode)))
          return tmp1;

        if (tmp2 != 0
            && (GET_CODE (tmp2) == SYMBOL_REF
                || GET_CODE (tmp2) == LABEL_REF
                || (GET_CODE (tmp2) == ADDRESS
                    && GET_MODE (tmp2) != VOIDmode)))
          return tmp2;

        /* We could not determine which of the two operands was the
           base register and which was the index.  So we can determine
           nothing from the base alias check.  */
        return 0;
      }

    case AND:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) != 0)
        return find_base_term (XEXP (x, 0));
      return 0;

    case SYMBOL_REF:
    case LABEL_REF:
      return x;

    default:
      return 0;
    }
}

/* Return 0 if the addresses X and Y are known to point to different
1487
   objects, 1 if they might be pointers to the same object.  */
1488
 
1489
static int
1490
base_alias_check (rtx x, rtx y, enum machine_mode x_mode,
1491
                  enum machine_mode y_mode)
1492
{
1493
  rtx x_base = find_base_term (x);
1494
  rtx y_base = find_base_term (y);
1495
 
1496
  /* If the address itself has no known base see if a known equivalent
1497
     value has one.  If either address still has no known base, nothing
1498
     is known about aliasing.  */
1499
  if (x_base == 0)
1500
    {
1501
      rtx x_c;
1502
 
1503
      if (! flag_expensive_optimizations || (x_c = canon_rtx (x)) == x)
1504
        return 1;
1505
 
1506
      x_base = find_base_term (x_c);
1507
      if (x_base == 0)
1508
        return 1;
1509
    }
1510
 
1511
  if (y_base == 0)
1512
    {
1513
      rtx y_c;
1514
      if (! flag_expensive_optimizations || (y_c = canon_rtx (y)) == y)
1515
        return 1;
1516
 
1517
      y_base = find_base_term (y_c);
1518
      if (y_base == 0)
1519
        return 1;
1520
    }
1521
 
1522
  /* If the base addresses are equal nothing is known about aliasing.  */
1523
  if (rtx_equal_p (x_base, y_base))
1524
    return 1;
1525
 
1526
  /* The base addresses of the read and write are different expressions.
1527
     If they are both symbols and they are not accessed via AND, there is
1528
     no conflict.  We can bring knowledge of object alignment into play
1529
     here.  For example, on alpha, "char a, b;" can alias one another,
1530
     though "char a; long b;" cannot.  */
1531
  if (GET_CODE (x_base) != ADDRESS && GET_CODE (y_base) != ADDRESS)
1532
    {
1533
      if (GET_CODE (x) == AND && GET_CODE (y) == AND)
1534
        return 1;
1535
      if (GET_CODE (x) == AND
1536
          && (GET_CODE (XEXP (x, 1)) != CONST_INT
1537
              || (int) GET_MODE_UNIT_SIZE (y_mode) < -INTVAL (XEXP (x, 1))))
1538
        return 1;
1539
      if (GET_CODE (y) == AND
1540
          && (GET_CODE (XEXP (y, 1)) != CONST_INT
1541
              || (int) GET_MODE_UNIT_SIZE (x_mode) < -INTVAL (XEXP (y, 1))))
1542
        return 1;
1543
      /* Differing symbols never alias.  */
1544
      return 0;
1545
    }
1546
 
1547
  /* If one address is a stack reference there can be no alias:
1548
     stack references using different base registers do not alias,
1549
     a stack reference can not alias a parameter, and a stack reference
1550
     can not alias a global.  */
1551
  if ((GET_CODE (x_base) == ADDRESS && GET_MODE (x_base) == Pmode)
1552
      || (GET_CODE (y_base) == ADDRESS && GET_MODE (y_base) == Pmode))
1553
    return 0;
1554
 
1555
  if (! flag_argument_noalias)
1556
    return 1;
1557
 
1558
  if (flag_argument_noalias > 1)
1559
    return 0;
1560
 
1561
  /* Weak noalias assertion (arguments are distinct, but may match globals).  */
1562
  return ! (GET_MODE (x_base) == VOIDmode && GET_MODE (y_base) == VOIDmode);
1563
}
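
/* A minimal sketch of the symbol/symbol path above (illustrative): for

     static int a;
     static int b;

   the two addresses have distinct SYMBOL_REF base terms, so
   rtx_equal_p fails; neither base is an ADDRESS and neither access
   goes through an AND, so control reaches "differing symbols never
   alias" and the function returns 0: a store to A cannot touch B.  */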

/* Convert the address X into something we can use.  This is done by returning
   it unchanged unless it is a value; in the latter case we call cselib to get
   a more useful rtx.  */

rtx
get_addr (rtx x)
{
  cselib_val *v;
  struct elt_loc_list *l;

  if (GET_CODE (x) != VALUE)
    return x;
  v = CSELIB_VAL_PTR (x);
  if (v)
    {
      for (l = v->locs; l; l = l->next)
        if (CONSTANT_P (l->loc))
          return l->loc;
      for (l = v->locs; l; l = l->next)
        if (!REG_P (l->loc) && !MEM_P (l->loc))
          return l->loc;
      if (v->locs)
        return v->locs->loc;
    }
  return x;
}
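
/* The preference order above, illustrated (register numbers are made
   up): if the VALUE's location list is { (reg 70), (symbol_ref "tab") },
   the first loop returns the SYMBOL_REF, since it satisfies CONSTANT_P;
   if the list is { (reg 70), (mem (reg 71)) }, neither loop matches and
   the first location, (reg 70), is returned as a fallback.  */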

/*  Return the address of the (N_REFS + 1)th memory reference to ADDR
    where SIZE is the size in bytes of the memory reference.  If ADDR
    is not modified by the memory reference then ADDR is returned.  */

static rtx
addr_side_effect_eval (rtx addr, int size, int n_refs)
{
  int offset = 0;

  switch (GET_CODE (addr))
    {
    case PRE_INC:
      offset = (n_refs + 1) * size;
      break;
    case PRE_DEC:
      offset = -(n_refs + 1) * size;
      break;
    case POST_INC:
      offset = n_refs * size;
      break;
    case POST_DEC:
      offset = -n_refs * size;
      break;

    default:
      return addr;
    }

  if (offset)
    addr = gen_rtx_PLUS (GET_MODE (addr), XEXP (addr, 0),
                         GEN_INT (offset));
  else
    addr = XEXP (addr, 0);
  addr = canon_rtx (addr);

  return addr;
}
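
/* Worked example of the offsets above (illustrative): for a 4-byte
   reference through (post_inc (reg R)), the first access (N_REFS == 0)
   gets offset 0 * 4 = 0, so the bare (reg R) is returned; the next
   access (N_REFS == 1) gets offset 4 and hence
   (plus (reg R) (const_int 4)).  A 4-byte (pre_dec (reg R)) reference
   instead starts at offset -(0 + 1) * 4 = -4, i.e.
   (plus (reg R) (const_int -4)).  */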

/* Return nonzero if X and Y (memory addresses) could reference the
   same location in memory.  C is an offset accumulator.  When
   C is nonzero, we are testing aliases between X and Y + C.
   XSIZE is the size in bytes of the X reference,
   similarly YSIZE is the size in bytes for Y.
   Expect that canon_rtx has already been called for X and Y.

   If XSIZE or YSIZE is zero, we do not know the amount of memory being
   referenced (the reference was BLKmode), so make the most pessimistic
   assumptions.

   If XSIZE or YSIZE is negative, we may access memory outside the object
   being referenced as a side effect.  This can happen when using AND to
   align memory references, as is done on the Alpha.

   It would be nice to notice that varying addresses cannot conflict with
   fp if no local variables had their addresses taken, but that's too
   hard now.  */

static int
memrefs_conflict_p (int xsize, rtx x, int ysize, rtx y, HOST_WIDE_INT c)
{
  if (GET_CODE (x) == VALUE)
    x = get_addr (x);
  if (GET_CODE (y) == VALUE)
    y = get_addr (y);
  if (GET_CODE (x) == HIGH)
    x = XEXP (x, 0);
  else if (GET_CODE (x) == LO_SUM)
    x = XEXP (x, 1);
  else
    x = addr_side_effect_eval (x, xsize, 0);
  if (GET_CODE (y) == HIGH)
    y = XEXP (y, 0);
  else if (GET_CODE (y) == LO_SUM)
    y = XEXP (y, 1);
  else
    y = addr_side_effect_eval (y, ysize, 0);

  if (rtx_equal_for_memref_p (x, y))
    {
      if (xsize <= 0 || ysize <= 0)
        return 1;
      if (c >= 0 && xsize > c)
        return 1;
      if (c < 0 && ysize+c > 0)
        return 1;
      return 0;
    }

  /* This code used to check for conflicts involving stack references and
     globals but the base address alias code now handles these cases.  */

  if (GET_CODE (x) == PLUS)
    {
      /* The fact that X is canonicalized means that this
         PLUS rtx is canonicalized.  */
      rtx x0 = XEXP (x, 0);
      rtx x1 = XEXP (x, 1);

      if (GET_CODE (y) == PLUS)
        {
          /* The fact that Y is canonicalized means that this
             PLUS rtx is canonicalized.  */
          rtx y0 = XEXP (y, 0);
          rtx y1 = XEXP (y, 1);

          if (rtx_equal_for_memref_p (x1, y1))
            return memrefs_conflict_p (xsize, x0, ysize, y0, c);
          if (rtx_equal_for_memref_p (x0, y0))
            return memrefs_conflict_p (xsize, x1, ysize, y1, c);
          if (GET_CODE (x1) == CONST_INT)
            {
              if (GET_CODE (y1) == CONST_INT)
                return memrefs_conflict_p (xsize, x0, ysize, y0,
                                           c - INTVAL (x1) + INTVAL (y1));
              else
                return memrefs_conflict_p (xsize, x0, ysize, y,
                                           c - INTVAL (x1));
            }
          else if (GET_CODE (y1) == CONST_INT)
            return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));

          return 1;
        }
      else if (GET_CODE (x1) == CONST_INT)
        return memrefs_conflict_p (xsize, x0, ysize, y, c - INTVAL (x1));
    }
  else if (GET_CODE (y) == PLUS)
    {
      /* The fact that Y is canonicalized means that this
         PLUS rtx is canonicalized.  */
      rtx y0 = XEXP (y, 0);
      rtx y1 = XEXP (y, 1);

      if (GET_CODE (y1) == CONST_INT)
        return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));
      else
        return 1;
    }

  if (GET_CODE (x) == GET_CODE (y))
    switch (GET_CODE (x))
      {
      case MULT:
        {
          /* Handle cases where we expect the second operands to be the
             same, and check only whether the first operand would conflict
             or not.  */
          rtx x0, y0;
          rtx x1 = canon_rtx (XEXP (x, 1));
          rtx y1 = canon_rtx (XEXP (y, 1));
          if (! rtx_equal_for_memref_p (x1, y1))
            return 1;
          x0 = canon_rtx (XEXP (x, 0));
          y0 = canon_rtx (XEXP (y, 0));
          if (rtx_equal_for_memref_p (x0, y0))
            return (xsize == 0 || ysize == 0
                    || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0));

          /* Can't properly adjust our sizes.  */
          if (GET_CODE (x1) != CONST_INT)
            return 1;
          xsize /= INTVAL (x1);
          ysize /= INTVAL (x1);
          c /= INTVAL (x1);
          return memrefs_conflict_p (xsize, x0, ysize, y0, c);
        }

      default:
        break;
      }

  /* Treat an access through an AND (e.g. a subword access on an Alpha)
     as an access with indeterminate size.  Assume that references
     besides AND are aligned, so if the size of the other reference is
     at least as large as the alignment, assume no other overlap.  */
  if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT)
    {
      if (GET_CODE (y) == AND || ysize < -INTVAL (XEXP (x, 1)))
        xsize = -1;
      return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)), ysize, y, c);
    }
  if (GET_CODE (y) == AND && GET_CODE (XEXP (y, 1)) == CONST_INT)
    {
      /* ??? If we are indexing far enough into the array/structure, we
         may yet be able to determine that we can not overlap.  But we
         also need to know that we are far enough from the end not to
         overlap a following reference, so we do nothing with that for
         now.  */
      if (GET_CODE (x) == AND || xsize < -INTVAL (XEXP (y, 1)))
        ysize = -1;
      return memrefs_conflict_p (xsize, x, ysize, canon_rtx (XEXP (y, 0)), c);
    }

  if (CONSTANT_P (x))
    {
      if (GET_CODE (x) == CONST_INT && GET_CODE (y) == CONST_INT)
        {
          c += (INTVAL (y) - INTVAL (x));
          return (xsize <= 0 || ysize <= 0
                  || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0));
        }

      if (GET_CODE (x) == CONST)
        {
          if (GET_CODE (y) == CONST)
            return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
                                       ysize, canon_rtx (XEXP (y, 0)), c);
          else
            return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
                                       ysize, y, c);
        }
      if (GET_CODE (y) == CONST)
        return memrefs_conflict_p (xsize, x, ysize,
                                   canon_rtx (XEXP (y, 0)), c);

      if (CONSTANT_P (y))
        return (xsize <= 0 || ysize <= 0
                || (rtx_equal_for_memref_p (x, y)
                    && ((c >= 0 && xsize > c) || (c < 0 && ysize+c > 0))));

      return 1;
    }
  return 1;
}
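
/* Worked example of the offset accumulator C (illustrative): comparing
   a 4-byte reference at (reg R) against a 4-byte reference at
   (plus (reg R) (const_int 2)), the PLUS case strips the constant and
   recurses with C == 2.  The two operands are now
   rtx_equal_for_memref_p, and the test "c >= 0 && xsize > c" (4 > 2)
   reports a conflict, which is right: the accesses overlap in bytes 2
   and 3.  With a displacement of 4 instead, 4 > 4 fails, the C < 0
   test does not apply, and the references are correctly found
   disjoint.  */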

/* Functions to compute memory dependencies.

   Since we process the insns in execution order, we can build tables
   to keep track of what registers are fixed (and not aliased), what registers
   are varying in known ways, and what registers are varying in unknown
   ways.

   If both memory references are volatile, then there must always be a
   dependence between the two references, since their order can not be
   changed.  A volatile and non-volatile reference can be interchanged
   though.

   A MEM_IN_STRUCT reference at a non-AND varying address can never
   conflict with a non-MEM_IN_STRUCT reference at a fixed address.  We
   also must allow AND addresses, because they may generate accesses
   outside the object being referenced.  This is used to generate
   aligned addresses from unaligned addresses, for instance, the alpha
   storeqi_unaligned pattern.  */

/* Read dependence: X is read after read in MEM takes place.  There can
   only be a dependence here if both reads are volatile.  */

int
read_dependence (rtx mem, rtx x)
{
  return MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem);
}

/* Returns MEM1 if and only if MEM1 is a scalar at a fixed address and
   MEM2 is a reference to a structure at a varying address, or returns
   MEM2 if vice versa.  Otherwise, returns NULL_RTX.  If a non-NULL
   value is returned MEM1 and MEM2 can never alias.  VARIES_P is used
   to decide whether or not an address may vary; it should return
   nonzero whenever variation is possible.
   MEM1_ADDR and MEM2_ADDR are the addresses of MEM1 and MEM2.  */

static rtx
fixed_scalar_and_varying_struct_p (rtx mem1, rtx mem2, rtx mem1_addr,
                                   rtx mem2_addr,
                                   int (*varies_p) (rtx, int))
{
  if (! flag_strict_aliasing)
    return NULL_RTX;

  if (MEM_SCALAR_P (mem1) && MEM_IN_STRUCT_P (mem2)
      && !varies_p (mem1_addr, 1) && varies_p (mem2_addr, 1))
    /* MEM1 is a scalar at a fixed address; MEM2 is a struct at a
       varying address.  */
    return mem1;

  if (MEM_IN_STRUCT_P (mem1) && MEM_SCALAR_P (mem2)
      && varies_p (mem1_addr, 1) && !varies_p (mem2_addr, 1))
    /* MEM2 is a scalar at a fixed address; MEM1 is a struct at a
       varying address.  */
    return mem2;

  return NULL_RTX;
}

/* Returns nonzero if something about the mode or address format of MEM
   indicates that it might well alias *anything*.  */

static int
aliases_everything_p (rtx mem)
{
  if (GET_CODE (XEXP (mem, 0)) == AND)
    /* If the address is an AND, it's very hard to know what it is
       actually pointing at.  */
    return 1;

  return 0;
}

/* Return true if we can determine that the fields referenced cannot
   overlap for any pair of objects.  */

static bool
nonoverlapping_component_refs_p (tree x, tree y)
{
  tree fieldx, fieldy, typex, typey, orig_y;

  do
    {
      /* The comparison has to be done at a common type, since we don't
         know how the inheritance hierarchy works.  */
      orig_y = y;
      do
        {
          fieldx = TREE_OPERAND (x, 1);
          typex = TYPE_MAIN_VARIANT (DECL_FIELD_CONTEXT (fieldx));

          y = orig_y;
          do
            {
              fieldy = TREE_OPERAND (y, 1);
              typey = TYPE_MAIN_VARIANT (DECL_FIELD_CONTEXT (fieldy));

              if (typex == typey)
                goto found;

              y = TREE_OPERAND (y, 0);
            }
          while (y && TREE_CODE (y) == COMPONENT_REF);

          x = TREE_OPERAND (x, 0);
        }
      while (x && TREE_CODE (x) == COMPONENT_REF);
      /* Never found a common type.  */
      return false;

    found:
      /* If we're left with accessing different fields of a structure,
         then no overlap.  */
      if (TREE_CODE (typex) == RECORD_TYPE
          && fieldx != fieldy)
        return true;

      /* The comparison on the current field failed.  If we're accessing
         a deeply nested structure, look at the next outer level.  */
      x = TREE_OPERAND (x, 0);
      y = TREE_OPERAND (y, 0);
    }
  while (x && y
         && TREE_CODE (x) == COMPONENT_REF
         && TREE_CODE (y) == COMPONENT_REF);

  return false;
}
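
/* Illustrative case for the walk above: given

     struct S { int a; int b; } *p, *q;

   the references p->a and q->b meet at the common type S; the fields
   differ and S is a RECORD_TYPE, so the function returns true even
   when p == q, since distinct fields of one structure cannot overlap.
   Were S a union, the RECORD_TYPE test would fail and the loop would
   continue outward instead.  */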

/* Look at the bottom of the COMPONENT_REF list for a DECL, and return it.  */

static tree
decl_for_component_ref (tree x)
{
  do
    {
      x = TREE_OPERAND (x, 0);
    }
  while (x && TREE_CODE (x) == COMPONENT_REF);

  return x && DECL_P (x) ? x : NULL_TREE;
}
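
/* E.g. (illustrative): for the reference a.b.c, represented as
   COMPONENT_REF (COMPONENT_REF (a, b), c), the loop strips both
   COMPONENT_REFs and returns the VAR_DECL for A.  */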

/* Walk up the COMPONENT_REF list and adjust OFFSET to compensate for the
   offset of the field reference.  */

static rtx
adjust_offset_for_component_ref (tree x, rtx offset)
{
  HOST_WIDE_INT ioffset;

  if (! offset)
    return NULL_RTX;

  ioffset = INTVAL (offset);
  do
    {
      tree offset = component_ref_field_offset (x);
      tree field = TREE_OPERAND (x, 1);

      if (! host_integerp (offset, 1))
        return NULL_RTX;
      ioffset += (tree_low_cst (offset, 1)
                  + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
                     / BITS_PER_UNIT));

      x = TREE_OPERAND (x, 0);
    }
  while (x && TREE_CODE (x) == COMPONENT_REF);

  return GEN_INT (ioffset);
}
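
/* Worked example (illustrative, assuming a byte-addressed target with
   BITS_PER_UNIT == 8): suppose OFFSET comes in as (const_int 4) and X
   is a COMPONENT_REF whose field sits at byte 8 of its structure
   (component_ref_field_offset of 0 and DECL_FIELD_BIT_OFFSET of 64).
   Then ioffset becomes 4 + 0 + 64 / 8 = 12, and (const_int 12) is
   returned: the offset re-expressed relative to the containing
   declaration.  */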

/* Return nonzero if we can determine the exprs corresponding to memrefs
   X and Y and they do not overlap.  */

static int
nonoverlapping_memrefs_p (rtx x, rtx y)
{
  tree exprx = MEM_EXPR (x), expry = MEM_EXPR (y);
  rtx rtlx, rtly;
  rtx basex, basey;
  rtx moffsetx, moffsety;
  HOST_WIDE_INT offsetx = 0, offsety = 0, sizex, sizey, tem;

  /* Unless both have exprs, we can't tell anything.  */
  if (exprx == 0 || expry == 0)
    return 0;

  /* If both are field references, we may be able to determine something.  */
  if (TREE_CODE (exprx) == COMPONENT_REF
      && TREE_CODE (expry) == COMPONENT_REF
      && nonoverlapping_component_refs_p (exprx, expry))
    return 1;


  /* If the field reference test failed, look at the DECLs involved.  */
  moffsetx = MEM_OFFSET (x);
  if (TREE_CODE (exprx) == COMPONENT_REF)
    {
      if (TREE_CODE (expry) == VAR_DECL
          && POINTER_TYPE_P (TREE_TYPE (expry)))
        {
         tree field = TREE_OPERAND (exprx, 1);
         tree fieldcontext = DECL_FIELD_CONTEXT (field);
         if (ipa_type_escape_field_does_not_clobber_p (fieldcontext,
                                                       TREE_TYPE (field)))
           return 1;
        }
      {
        tree t = decl_for_component_ref (exprx);
        if (! t)
          return 0;
        moffsetx = adjust_offset_for_component_ref (exprx, moffsetx);
        exprx = t;
      }
    }
  else if (INDIRECT_REF_P (exprx))
    {
      exprx = TREE_OPERAND (exprx, 0);
      if (flag_argument_noalias < 2
          || TREE_CODE (exprx) != PARM_DECL)
        return 0;
    }

  moffsety = MEM_OFFSET (y);
  if (TREE_CODE (expry) == COMPONENT_REF)
    {
      if (TREE_CODE (exprx) == VAR_DECL
          && POINTER_TYPE_P (TREE_TYPE (exprx)))
        {
         tree field = TREE_OPERAND (expry, 1);
         tree fieldcontext = DECL_FIELD_CONTEXT (field);
         if (ipa_type_escape_field_does_not_clobber_p (fieldcontext,
                                                       TREE_TYPE (field)))
           return 1;
        }
      {
        tree t = decl_for_component_ref (expry);
        if (! t)
          return 0;
        moffsety = adjust_offset_for_component_ref (expry, moffsety);
        expry = t;
      }
    }
  else if (INDIRECT_REF_P (expry))
    {
      expry = TREE_OPERAND (expry, 0);
      if (flag_argument_noalias < 2
          || TREE_CODE (expry) != PARM_DECL)
        return 0;
    }

  if (! DECL_P (exprx) || ! DECL_P (expry))
    return 0;

  rtlx = DECL_RTL (exprx);
  rtly = DECL_RTL (expry);

  /* If either RTL is not a MEM, it must be a REG or CONCAT, meaning they
     can't overlap unless they are the same because we never reuse that part
     of the stack frame used for locals for spilled pseudos.  */
  if ((!MEM_P (rtlx) || !MEM_P (rtly))
      && ! rtx_equal_p (rtlx, rtly))
    return 1;

  /* Get the base and offsets of both decls.  If either is a register, we
     know both are and are the same, so use that as the base.  The only
     way we can avoid overlap is if we can deduce that they are
     nonoverlapping pieces of that decl, which is very rare.  */
  basex = MEM_P (rtlx) ? XEXP (rtlx, 0) : rtlx;
  if (GET_CODE (basex) == PLUS && GET_CODE (XEXP (basex, 1)) == CONST_INT)
    offsetx = INTVAL (XEXP (basex, 1)), basex = XEXP (basex, 0);

  basey = MEM_P (rtly) ? XEXP (rtly, 0) : rtly;
  if (GET_CODE (basey) == PLUS && GET_CODE (XEXP (basey, 1)) == CONST_INT)
    offsety = INTVAL (XEXP (basey, 1)), basey = XEXP (basey, 0);

  /* If the bases are different, we know they do not overlap if both
     are constants or if one is a constant and the other a pointer into the
     stack frame.  Otherwise a different base means we can't tell if they
     overlap or not.  */
  if (! rtx_equal_p (basex, basey))
    return ((CONSTANT_P (basex) && CONSTANT_P (basey))
            || (CONSTANT_P (basex) && REG_P (basey)
                && REGNO_PTR_FRAME_P (REGNO (basey)))
            || (CONSTANT_P (basey) && REG_P (basex)
                && REGNO_PTR_FRAME_P (REGNO (basex))));

  sizex = (!MEM_P (rtlx) ? (int) GET_MODE_SIZE (GET_MODE (rtlx))
           : MEM_SIZE (rtlx) ? INTVAL (MEM_SIZE (rtlx))
           : -1);
  sizey = (!MEM_P (rtly) ? (int) GET_MODE_SIZE (GET_MODE (rtly))
           : MEM_SIZE (rtly) ? INTVAL (MEM_SIZE (rtly))
           : -1);

  /* If we have an offset for either memref, it can update the values computed
     above.  */
  if (moffsetx)
    offsetx += INTVAL (moffsetx), sizex -= INTVAL (moffsetx);
  if (moffsety)
    offsety += INTVAL (moffsety), sizey -= INTVAL (moffsety);

  /* If a memref has both a size and an offset, we can use the smaller size.
     We can't do this if the offset isn't known because we must view this
     memref as being anywhere inside the DECL's MEM.  */
  if (MEM_SIZE (x) && moffsetx)
    sizex = INTVAL (MEM_SIZE (x));
  if (MEM_SIZE (y) && moffsety)
    sizey = INTVAL (MEM_SIZE (y));

  /* Put the values of the memref with the lower offset in X's values.  */
  if (offsetx > offsety)
    {
      tem = offsetx, offsetx = offsety, offsety = tem;
      tem = sizex, sizex = sizey, sizey = tem;
    }

  /* If we don't know the size of the lower-offset value, we can't tell
     if they conflict.  Otherwise, we do the test.  */
  return sizex >= 0 && offsety >= offsetx + sizex;
}
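
/* Sketch of the final offset/size test (an illustrative frame layout):
   suppose two stack locals occupy bytes [fp - 8, fp - 4) and
   [fp - 4, fp).  Both bases are the frame pointer, so rtx_equal_p
   holds; after normalizing so X has the lower offset, sizex == 4 and
   offsety == offsetx + 4, hence "offsety >= offsetx + sizex" is true
   and the two references are provably disjoint.  */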

/* True dependence: X is read after store in MEM takes place.  */

int
true_dependence (rtx mem, enum machine_mode mem_mode, rtx x,
                 int (*varies) (rtx, int))
{
  rtx x_addr, mem_addr;
  rtx base;

  if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
    return 1;

  /* (mem:BLK (scratch)) is a special mechanism to conflict with everything.
     This is used in epilogue deallocation functions.  */
  if (GET_MODE (x) == BLKmode && GET_CODE (XEXP (x, 0)) == SCRATCH)
    return 1;
  if (GET_MODE (mem) == BLKmode && GET_CODE (XEXP (mem, 0)) == SCRATCH)
    return 1;
  if (MEM_ALIAS_SET (x) == ALIAS_SET_MEMORY_BARRIER
      || MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER)
    return 1;

  if (DIFFERENT_ALIAS_SETS_P (x, mem))
    return 0;

  /* Read-only memory is by definition never modified, and therefore can't
     conflict with anything.  We don't expect to find read-only set on MEM,
     but stupid user tricks can produce them, so don't die.  */
  if (MEM_READONLY_P (x))
    return 0;

  if (nonoverlapping_memrefs_p (mem, x))
    return 0;

  if (mem_mode == VOIDmode)
    mem_mode = GET_MODE (mem);

  x_addr = get_addr (XEXP (x, 0));
  mem_addr = get_addr (XEXP (mem, 0));

  base = find_base_term (x_addr);
  if (base && (GET_CODE (base) == LABEL_REF
               || (GET_CODE (base) == SYMBOL_REF
                   && CONSTANT_POOL_ADDRESS_P (base))))
    return 0;

  if (! base_alias_check (x_addr, mem_addr, GET_MODE (x), mem_mode))
    return 0;

  x_addr = canon_rtx (x_addr);
  mem_addr = canon_rtx (mem_addr);

  if (! memrefs_conflict_p (GET_MODE_SIZE (mem_mode), mem_addr,
                            SIZE_FOR_MODE (x), x_addr, 0))
    return 0;

  if (aliases_everything_p (x))
    return 1;

  /* We cannot use aliases_everything_p to test MEM, since we must look
     at MEM_MODE, rather than GET_MODE (MEM).  */
  if (mem_mode == QImode || GET_CODE (mem_addr) == AND)
    return 1;

  /* In true_dependence we also allow BLKmode to alias anything.  Why
     don't we do this in anti_dependence and output_dependence?  */
  if (mem_mode == BLKmode || GET_MODE (x) == BLKmode)
    return 1;

  return ! fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr,
                                              varies);
}
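
/* Usage sketch (a hypothetical caller): a pass asking whether a load
   may be moved above an earlier store would test something like

     if (true_dependence (store_mem, VOIDmode, load_mem,
                          rtx_addr_varies_p))
       ...the load must stay after the store...

   Passing VOIDmode for MEM_MODE simply makes the function fall back to
   GET_MODE (store_mem), as handled above.  */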

/* Canonical true dependence: X is read after store in MEM takes place.
   Variant of true_dependence which assumes MEM has already been
   canonicalized (hence we no longer do that here).
   The mem_addr argument has been added, since true_dependence computed
   this value prior to canonicalizing.  */

int
canon_true_dependence (rtx mem, enum machine_mode mem_mode, rtx mem_addr,
                       rtx x, int (*varies) (rtx, int))
{
  rtx x_addr;

  if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
    return 1;

  /* (mem:BLK (scratch)) is a special mechanism to conflict with everything.
     This is used in epilogue deallocation functions.  */
  if (GET_MODE (x) == BLKmode && GET_CODE (XEXP (x, 0)) == SCRATCH)
    return 1;
  if (GET_MODE (mem) == BLKmode && GET_CODE (XEXP (mem, 0)) == SCRATCH)
    return 1;
  if (MEM_ALIAS_SET (x) == ALIAS_SET_MEMORY_BARRIER
      || MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER)
    return 1;

  if (DIFFERENT_ALIAS_SETS_P (x, mem))
    return 0;

  /* Read-only memory is by definition never modified, and therefore can't
     conflict with anything.  We don't expect to find read-only set on MEM,
     but stupid user tricks can produce them, so don't die.  */
  if (MEM_READONLY_P (x))
    return 0;

  if (nonoverlapping_memrefs_p (x, mem))
    return 0;

  x_addr = get_addr (XEXP (x, 0));

  if (! base_alias_check (x_addr, mem_addr, GET_MODE (x), mem_mode))
    return 0;

  x_addr = canon_rtx (x_addr);
  if (! memrefs_conflict_p (GET_MODE_SIZE (mem_mode), mem_addr,
                            SIZE_FOR_MODE (x), x_addr, 0))
    return 0;

  if (aliases_everything_p (x))
    return 1;

  /* We cannot use aliases_everything_p to test MEM, since we must look
     at MEM_MODE, rather than GET_MODE (MEM).  */
  if (mem_mode == QImode || GET_CODE (mem_addr) == AND)
    return 1;

  /* In true_dependence we also allow BLKmode to alias anything.  Why
     don't we do this in anti_dependence and output_dependence?  */
  if (mem_mode == BLKmode || GET_MODE (x) == BLKmode)
    return 1;

  return ! fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr,
                                              varies);
}

/* Returns nonzero if a write to X might alias a previous read from
   (or, if WRITEP is nonzero, a write to) MEM.  */

static int
write_dependence_p (rtx mem, rtx x, int writep)
{
  rtx x_addr, mem_addr;
  rtx fixed_scalar;
  rtx base;

  if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
    return 1;

  /* (mem:BLK (scratch)) is a special mechanism to conflict with everything.
     This is used in epilogue deallocation functions.  */
  if (GET_MODE (x) == BLKmode && GET_CODE (XEXP (x, 0)) == SCRATCH)
    return 1;
  if (GET_MODE (mem) == BLKmode && GET_CODE (XEXP (mem, 0)) == SCRATCH)
    return 1;
  if (MEM_ALIAS_SET (x) == ALIAS_SET_MEMORY_BARRIER
      || MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER)
    return 1;

  if (DIFFERENT_ALIAS_SETS_P (x, mem))
    return 0;

  /* A read from read-only memory can't conflict with read-write memory.  */
  if (!writep && MEM_READONLY_P (mem))
    return 0;

  if (nonoverlapping_memrefs_p (x, mem))
    return 0;

  x_addr = get_addr (XEXP (x, 0));
  mem_addr = get_addr (XEXP (mem, 0));

  if (! writep)
    {
      base = find_base_term (mem_addr);
      if (base && (GET_CODE (base) == LABEL_REF
                   || (GET_CODE (base) == SYMBOL_REF
                       && CONSTANT_POOL_ADDRESS_P (base))))
        return 0;
    }

  if (! base_alias_check (x_addr, mem_addr, GET_MODE (x),
                          GET_MODE (mem)))
    return 0;

  x_addr = canon_rtx (x_addr);
  mem_addr = canon_rtx (mem_addr);

  if (!memrefs_conflict_p (SIZE_FOR_MODE (mem), mem_addr,
                           SIZE_FOR_MODE (x), x_addr, 0))
    return 0;

  fixed_scalar
    = fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr,
                                         rtx_addr_varies_p);

  return (!(fixed_scalar == mem && !aliases_everything_p (x))
          && !(fixed_scalar == x && !aliases_everything_p (mem)));
}

/* Anti dependence: X is written after read in MEM takes place.  */

int
anti_dependence (rtx mem, rtx x)
{
  return write_dependence_p (mem, x, /*writep=*/0);
}

/* Output dependence: X is written after store in MEM takes place.  */

int
output_dependence (rtx mem, rtx x)
{
  return write_dependence_p (mem, x, /*writep=*/1);
}
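
/* The three dependence queries in this file, in C terms (illustrative):

     x = *p;  *q = y;   anti_dependence: may the store through Q
                        clobber the location the load read?
     *p = x;  y = *q;   true_dependence: may the load through Q read
                        the location the store wrote?
     *p = x;  *q = y;   output_dependence: may both stores hit the
                        same location?  */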


void
init_alias_once (void)
{
  int i;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    /* Check whether this register can hold an incoming pointer
       argument.  FUNCTION_ARG_REGNO_P tests outgoing register
       numbers, so translate if necessary due to register windows.  */
    if (FUNCTION_ARG_REGNO_P (OUTGOING_REGNO (i))
        && HARD_REGNO_MODE_OK (i, Pmode))
      static_reg_base_value[i]
        = gen_rtx_ADDRESS (VOIDmode, gen_rtx_REG (Pmode, i));

  static_reg_base_value[STACK_POINTER_REGNUM]
    = gen_rtx_ADDRESS (Pmode, stack_pointer_rtx);
  static_reg_base_value[ARG_POINTER_REGNUM]
    = gen_rtx_ADDRESS (Pmode, arg_pointer_rtx);
  static_reg_base_value[FRAME_POINTER_REGNUM]
    = gen_rtx_ADDRESS (Pmode, frame_pointer_rtx);
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
  static_reg_base_value[HARD_FRAME_POINTER_REGNUM]
    = gen_rtx_ADDRESS (Pmode, hard_frame_pointer_rtx);
#endif
}

/* Set MEMORY_MODIFIED when X modifies DATA (which is assumed
   to be a memory reference).  */
static bool memory_modified;
static void
memory_modified_1 (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
{
  if (MEM_P (x))
    {
      if (anti_dependence (x, (rtx)data) || output_dependence (x, (rtx)data))
        memory_modified = true;
    }
}


/* Return true when INSN possibly modifies the memory contents of MEM
   (i.e. the memory at that address can be modified).  */
bool
memory_modified_in_insn_p (rtx mem, rtx insn)
{
  if (!INSN_P (insn))
    return false;
  memory_modified = false;
  note_stores (PATTERN (insn), memory_modified_1, mem);
  return memory_modified;
}
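
/* Usage sketch (a hypothetical caller): a pass that wants to keep a
   value cached in a register across INSN might test

     if (!memory_modified_in_insn_p (mem, insn))
       ...no store in INSN's pattern can overwrite MEM...

   note_stores invokes memory_modified_1 once per location INSN's
   pattern writes, and the callback folds the anti/output dependence
   verdicts into the MEMORY_MODIFIED flag read back above.  */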

/* Initialize the aliasing machinery.  Initialize the REG_KNOWN_VALUE
   array.  */

void
init_alias_analysis (void)
{
  unsigned int maxreg = max_reg_num ();
  int changed, pass;
  int i;
  unsigned int ui;
  rtx insn;

  timevar_push (TV_ALIAS_ANALYSIS);

  reg_known_value_size = maxreg - FIRST_PSEUDO_REGISTER;
  reg_known_value = ggc_calloc (reg_known_value_size, sizeof (rtx));
  reg_known_equiv_p = xcalloc (reg_known_value_size, sizeof (bool));

  /* If we have memory allocated from the previous run, use it.  */
  if (old_reg_base_value)
    reg_base_value = old_reg_base_value;

  if (reg_base_value)
    VEC_truncate (rtx, reg_base_value, 0);

  VEC_safe_grow (rtx, gc, reg_base_value, maxreg);
  memset (VEC_address (rtx, reg_base_value), 0,
          sizeof (rtx) * VEC_length (rtx, reg_base_value));

  new_reg_base_value = XNEWVEC (rtx, maxreg);
  reg_seen = XNEWVEC (char, maxreg);

  /* The basic idea is that each pass through this loop will use the
     "constant" information from the previous pass to propagate alias
     information through another level of assignments.

     This could get expensive if the assignment chains are long.  Maybe
     we should throttle the number of iterations, possibly based on
     the optimization level or flag_expensive_optimizations.

     We could propagate more information in the first pass by making use
     of REG_N_SETS to determine immediately that the alias information
     for a pseudo is "constant".

     A program with an uninitialized variable can cause an infinite loop
     here.  Instead of doing a full dataflow analysis to detect such problems
     we just cap the number of iterations for the loop.

     The state of the arrays for the set chain in question does not matter
     since the program has undefined behavior.  */

  pass = 0;
  do
    {
      /* Assume nothing will change this iteration of the loop.  */
      changed = 0;

      /* We want to assign the same IDs each iteration of this loop, so
         start counting from zero each iteration of the loop.  */
      unique_id = 0;

      /* We're at the start of the function each iteration through the
         loop, so we're copying arguments.  */
      copying_arguments = true;

      /* Wipe the potential alias information clean for this pass.  */
      memset (new_reg_base_value, 0, maxreg * sizeof (rtx));

      /* Wipe the reg_seen array clean.  */
      memset (reg_seen, 0, maxreg);

      /* Mark all hard registers which may contain an address.
         The stack, frame and argument pointers may contain an address.
         An argument register which can hold a Pmode value may contain
         an address even if it is not in BASE_REGS.

         The address expression is VOIDmode for an argument and
         Pmode for other registers.  */

      memcpy (new_reg_base_value, static_reg_base_value,
              FIRST_PSEUDO_REGISTER * sizeof (rtx));

      /* Walk the insns adding values to the new_reg_base_value array.  */
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
        {
          if (INSN_P (insn))
            {
              rtx note, set;

#if defined (HAVE_prologue) || defined (HAVE_epilogue)
              /* The prologue/epilogue insns are not threaded onto the
                 insn chain until after reload has completed.  Thus,
                 there is no sense wasting time checking if INSN is in
                 the prologue/epilogue until after reload has completed.  */
              if (reload_completed
                  && prologue_epilogue_contains (insn))
                continue;
#endif

              /* If this insn has a noalias note, process it.  Otherwise,
                 scan for sets.  A simple set will have no side effects
                 which could change the base value of any other register.  */

              if (GET_CODE (PATTERN (insn)) == SET
                  && REG_NOTES (insn) != 0
                  && find_reg_note (insn, REG_NOALIAS, NULL_RTX))
                record_set (SET_DEST (PATTERN (insn)), NULL_RTX, NULL);
              else
                note_stores (PATTERN (insn), record_set, NULL);

              set = single_set (insn);

              if (set != 0
                  && REG_P (SET_DEST (set))
                  && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
                {
                  unsigned int regno = REGNO (SET_DEST (set));
                  rtx src = SET_SRC (set);
                  rtx t;

                  if (REG_NOTES (insn) != 0
                      && (((note = find_reg_note (insn, REG_EQUAL, 0)) != 0
                           && REG_N_SETS (regno) == 1)
                          || (note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) != 0)
                      && GET_CODE (XEXP (note, 0)) != EXPR_LIST
                      && ! rtx_varies_p (XEXP (note, 0), 1)
                      && ! reg_overlap_mentioned_p (SET_DEST (set),
                                                    XEXP (note, 0)))
                    {
                      set_reg_known_value (regno, XEXP (note, 0));
                      set_reg_known_equiv_p (regno,
                        REG_NOTE_KIND (note) == REG_EQUIV);
                    }
                  else if (REG_N_SETS (regno) == 1
                           && GET_CODE (src) == PLUS
                           && REG_P (XEXP (src, 0))
                           && (t = get_reg_known_value (REGNO (XEXP (src, 0))))
                           && GET_CODE (XEXP (src, 1)) == CONST_INT)
                    {
                      t = plus_constant (t, INTVAL (XEXP (src, 1)));
                      set_reg_known_value (regno, t);
                      set_reg_known_equiv_p (regno, 0);
                    }
                  else if (REG_N_SETS (regno) == 1
                           && ! rtx_varies_p (src, 1))
                    {
                      set_reg_known_value (regno, src);
                      set_reg_known_equiv_p (regno, 0);
                    }
                }
            }
          else if (NOTE_P (insn)
                   && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
            copying_arguments = false;
        }

      /* Now propagate values from new_reg_base_value to reg_base_value.  */
      gcc_assert (maxreg == (unsigned int) max_reg_num ());

      for (ui = 0; ui < maxreg; ui++)
        {
          if (new_reg_base_value[ui]
              && new_reg_base_value[ui] != VEC_index (rtx, reg_base_value, ui)
              && ! rtx_equal_p (new_reg_base_value[ui],
                                VEC_index (rtx, reg_base_value, ui)))
            {
              VEC_replace (rtx, reg_base_value, ui, new_reg_base_value[ui]);
              changed = 1;
            }
        }
    }
  while (changed && ++pass < MAX_ALIAS_LOOP_PASSES);

  /* Fill in the remaining entries.  */
  for (i = 0; i < (int)reg_known_value_size; i++)
    if (reg_known_value[i] == 0)
      reg_known_value[i] = regno_reg_rtx[i + FIRST_PSEUDO_REGISTER];

  /* Simplify the reg_base_value array so that no register refers to
     another register, except to special registers indirectly through
     ADDRESS expressions.

     In theory this loop can take as long as O(registers^2), but unless
     there are very long dependency chains it will run in close to linear
     time.

     This loop may not be needed any longer now that the main loop does
     a better job at propagating alias information.  */
  pass = 0;
  do
    {
      changed = 0;
      pass++;
      for (ui = 0; ui < maxreg; ui++)
        {
          rtx base = VEC_index (rtx, reg_base_value, ui);
          if (base && REG_P (base))
            {
              unsigned int base_regno = REGNO (base);
              if (base_regno == ui)             /* register set from itself */
                VEC_replace (rtx, reg_base_value, ui, 0);
              else
                VEC_replace (rtx, reg_base_value, ui,
                             VEC_index (rtx, reg_base_value, base_regno));
              changed = 1;
            }
        }
    }
  while (changed && pass < MAX_ALIAS_LOOP_PASSES);

  /* Clean up.  */
  free (new_reg_base_value);
  new_reg_base_value = 0;
  free (reg_seen);
  reg_seen = 0;
  timevar_pop (TV_ALIAS_ANALYSIS);
}
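
/* Illustrative propagation step (register numbers are made up): given

     (set (reg 100) (symbol_ref "buf"))
     (set (reg 101) (plus (reg 100) (const_int 16)))

   with each pseudo set exactly once, the "! rtx_varies_p" arm above
   records the SYMBOL_REF as reg 100's known value, and the PLUS arm
   then folds reg 101's value to buf+16 via plus_constant, so later
   alias queries can treat both registers as based on the same
   object.  */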

void
end_alias_analysis (void)
{
  old_reg_base_value = reg_base_value;
  ggc_free (reg_known_value);
  reg_known_value = 0;
  reg_known_value_size = 0;
  free (reg_known_equiv_p);
  reg_known_equiv_p = 0;
}

#include "gt-alias.h"
