/* Tree based points-to analysis
   Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dberlin@dberlin.org>

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify
   under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3 of the License, or
   (at your option) any later version.

   GCC is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
 
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "ggc.h"
#include "obstack.h"
#include "bitmap.h"
#include "flags.h"
#include "basic-block.h"
#include "output.h"
#include "tree.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "diagnostic-core.h"
#include "gimple.h"
#include "hashtab.h"
#include "function.h"
#include "cgraph.h"
#include "tree-pass.h"
#include "timevar.h"
#include "alloc-pool.h"
#include "splay-tree.h"
#include "params.h"
#include "cgraph.h"
#include "alias.h"
#include "pointer-set.h"
 
/* The idea behind this analyzer is to generate set constraints from the
   program, then solve the resulting constraints in order to generate the
   points-to sets.

   Set constraints are a way of modeling program analysis problems that
   involve sets.  They consist of an inclusion constraint language,
   describing the variables (each variable is a set) and operations that
   are involved on the variables, and a set of rules that derive facts
   from these operations.  To solve a system of set constraints, you derive
   all possible facts under the rules, which gives you the correct sets
   as a consequence.

   See "Efficient Field-sensitive pointer analysis for C" by David
   J. Pearce, Paul H. J. Kelly and Chris Hankin, at
   http://citeseer.ist.psu.edu/pearce04efficient.html

   Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
   of C Code in a Second" by Nevin Heintze and Olivier Tardieu, at
   http://citeseer.ist.psu.edu/heintze01ultrafast.html

   There are three types of real constraint expressions, DEREF,
   ADDRESSOF, and SCALAR.  Each constraint expression consists
   of a constraint type, a variable, and an offset.

   SCALAR is a constraint expression type used to represent x, whether
   it appears on the LHS or the RHS of a statement.
   DEREF is a constraint expression type used to represent *x, whether
   it appears on the LHS or the RHS of a statement.
   ADDRESSOF is a constraint expression used to represent &x, whether
   it appears on the LHS or the RHS of a statement.

   Each pointer variable in the program is assigned an integer id, and
   each field of a structure variable is assigned an integer id as well.

   Structure variables are linked to their list of fields through a "next
   field" in each variable that points to the next field in offset
   order.
   Each variable for a structure field has

   1. "size", that tells the size in bits of that field.
   2. "fullsize", that tells the size in bits of the entire structure.
   3. "offset", that tells the offset in bits from the beginning of the
   structure to this field.

   Thus,
   struct f
   {
     int a;
     int b;
   } foo;
   int *bar;

   looks like

   foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
   foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
   bar -> id 3, size 32, offset 0, fullsize 32, next NULL


  In order to solve the system of set constraints, the following is
  done:

  1. Each constraint variable x has a solution set associated with it,
  Sol(x).

  2. Constraints are separated into direct, copy, and complex.
  Direct constraints are ADDRESSOF constraints that require no extra
  processing, such as P = &Q.
  Copy constraints are those of the form P = Q.
  Complex constraints are all the constraints involving dereferences
  and offsets (including offsetted copies).

  3. All direct constraints of the form P = &Q are processed, such
  that Q is added to Sol(P).

  4. All complex constraints for a given constraint variable are stored in a
  linked list attached to that variable's node.

  5. A directed graph is built out of the copy constraints.  Each
  constraint variable is a node in the graph, and an edge from
  Q to P is added for each copy constraint of the form P = Q.

  6. The graph is then walked, and solution sets are
  propagated along the copy edges, such that an edge from Q to P
  causes Sol(P) <- Sol(P) union Sol(Q).

  7. As we visit each node, all complex constraints associated with
  that node are processed by adding appropriate copy edges to the graph, or the
  appropriate variables to the solution set.

  8. The process of walking the graph is iterated until no solution
  sets change.

  Prior to walking the graph in steps 6 and 7, we perform static
  cycle elimination on the constraint graph, as well
  as off-line variable substitution.

  TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
  on and turned into anything), but isn't.  You can just see what offset
  inside the pointed-to struct it's going to access.

  TODO: Constant bounded arrays can be handled as if they were structs of the
  same number of elements.

  TODO: Modeling heap and incoming pointers becomes much better if we
  add fields to them as we discover them, which we could do.

  TODO: We could handle unions, but to be honest, it's probably not
  worth the pain or slowdown.  */
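
/* As an illustration of the scheme above (a sketch only; real constraint
   generation also introduces artificial variables such as ESCAPED and
   NONLOCAL, plus temporaries for subexpressions), a fragment like

     int a, b;
     int *p, *q;
     p = &a;
     q = p;
     *q = &b;

   roughly yields the constraints

     p = &a          (direct)
     q = p           (copy)
     *q = &b         (complex, LHS dereference)

   Solving proceeds as described: step 3 puts a into Sol(p); the copy
   edge from p to q propagates it, so Sol(q) = {a}; the complex
   constraint then adds b to Sol(m) for every m in Sol(q), giving
   Sol(a) = {b}.  Final result: p and q point to {a}, a points to {b},
   and b points to nothing.  */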
 
/* IPA-PTA optimizations possible.

   When the indirect function called is ANYTHING we can add disambiguation
   based on the function signatures (or simply the parameter count which
   is the varinfo size).  We also do not need to consider functions that
   do not have their address taken.

   The is_global_var bit which marks escape points is overly conservative
   in IPA mode.  Split it into is_escape_point and is_global_var - only
   externally visible globals are escape points in IPA mode.  This is
   also needed to fix the pt_solution_includes_global predicate
   (and thus ptr_deref_may_alias_global_p).

   The way we introduce DECL_PT_UID to avoid fixing up all points-to
   sets in the translation unit when we copy a DECL during inlining
   pessimizes precision.  The advantage is that the DECL_PT_UID keeps
   compile-time and memory usage overhead low - the points-to sets
   do not grow or get unshared as they would during a fixup phase.
   An alternative solution is to delay IPA PTA until after all
   inlining transformations have been applied.

   The way we propagate clobber/use information isn't optimized.
   It should use a new complex constraint that properly filters
   out local variables of the callee (though that would make
   the sets invalid after inlining).  OTOH we might as well
   admit defeat to WHOPR and simply do all the clobber/use analysis
   and propagation after PTA finished but before we threw away
   points-to information for memory variables.  WHOPR and PTA
   do not play along well anyway - the whole constraint solving
   would need to be done in WPA phase and it will be very interesting
   to apply the results to local SSA names during LTRANS phase.

   We probably should compute a per-function unit-ESCAPE solution,
   propagating it simply like the clobber / use solutions.  The
   solution can go alongside the non-IPA escaped solution and be
   used to query which vars escape the unit through a function.

   We never put function decls in points-to sets so we do not
   keep the set of called functions for indirect calls.

   And probably more.  */
 
201
static bool use_field_sensitive = true;
202
static int in_ipa_mode = 0;
203
 
204
/* Used for predecessor bitmaps. */
205
static bitmap_obstack predbitmap_obstack;
206
 
207
/* Used for points-to sets.  */
208
static bitmap_obstack pta_obstack;
209
 
210
/* Used for oldsolution members of variables. */
211
static bitmap_obstack oldpta_obstack;
212
 
213
/* Used for per-solver-iteration bitmaps.  */
214
static bitmap_obstack iteration_obstack;
215
 
216
static unsigned int create_variable_info_for (tree, const char *);
217
typedef struct constraint_graph *constraint_graph_t;
218
static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);
219
 
220
struct constraint;
221
typedef struct constraint *constraint_t;
222
 
223
DEF_VEC_P(constraint_t);
224
DEF_VEC_ALLOC_P(constraint_t,heap);
225
 
226
#define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d)        \
227
  if (a)                                                \
228
    EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)
229
 
230
static struct constraint_stats
231
{
232
  unsigned int total_vars;
233
  unsigned int nonpointer_vars;
234
  unsigned int unified_vars_static;
235
  unsigned int unified_vars_dynamic;
236
  unsigned int iterations;
237
  unsigned int num_edges;
238
  unsigned int num_implicit_edges;
239
  unsigned int points_to_sets_created;
240
} stats;
241
 
242
struct variable_info
243
{
244
  /* ID of this variable  */
245
  unsigned int id;
246
 
247
  /* True if this is a variable created by the constraint analysis, such as
248
     heap variables and constraints we had to break up.  */
249
  unsigned int is_artificial_var : 1;
250
 
251
  /* True if this is a special variable whose solution set should not be
252
     changed.  */
253
  unsigned int is_special_var : 1;
254
 
255
  /* True for variables whose size is not known or variable.  */
256
  unsigned int is_unknown_size_var : 1;
257
 
258
  /* True for (sub-)fields that represent a whole variable.  */
259
  unsigned int is_full_var : 1;
260
 
261
  /* True if this is a heap variable.  */
262
  unsigned int is_heap_var : 1;
263
 
264
  /* True if this field may contain pointers.  */
265
  unsigned int may_have_pointers : 1;
266
 
267
  /* True if this field has only restrict qualified pointers.  */
268
  unsigned int only_restrict_pointers : 1;
269
 
270
  /* True if this represents a global variable.  */
271
  unsigned int is_global_var : 1;
272
 
273
  /* True if this represents an IPA function info.  */
274
  unsigned int is_fn_info : 1;
275
 
276
  /* A link to the variable for the next field in this structure.  */
277
  struct variable_info *next;
278
 
279
  /* Offset of this variable, in bits, from the base variable  */
280
  unsigned HOST_WIDE_INT offset;
281
 
282
  /* Size of the variable, in bits.  */
283
  unsigned HOST_WIDE_INT size;
284
 
285
  /* Full size of the base variable, in bits.  */
286
  unsigned HOST_WIDE_INT fullsize;
287
 
288
  /* Name of this variable */
289
  const char *name;
290
 
291
  /* Tree that this variable is associated with.  */
292
  tree decl;
293
 
294
  /* Points-to set for this variable.  */
295
  bitmap solution;
296
 
297
  /* Old points-to set for this variable.  */
298
  bitmap oldsolution;
299
};
300
typedef struct variable_info *varinfo_t;
301
 
302
static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
303
static varinfo_t first_or_preceding_vi_for_offset (varinfo_t,
304
                                                   unsigned HOST_WIDE_INT);
305
static varinfo_t lookup_vi_for_tree (tree);
306
static inline bool type_can_have_subvars (const_tree);
307
 
308
/* Pool of variable info structures.  */
309
static alloc_pool variable_info_pool;
310
 
311
DEF_VEC_P(varinfo_t);
312
 
313
DEF_VEC_ALLOC_P(varinfo_t, heap);
314
 
315
/* Table of variable info structures for constraint variables.
316
   Indexed directly by variable info id.  */
317
static VEC(varinfo_t,heap) *varmap;
318
 
319
/* Return the varmap element N */
320
 
321
static inline varinfo_t
322
get_varinfo (unsigned int n)
323
{
324
  return VEC_index (varinfo_t, varmap, n);
325
}
326
 
327
/* Static IDs for the special variables.  */
328
enum { nothing_id = 0, anything_id = 1, readonly_id = 2,
329
       escaped_id = 3, nonlocal_id = 4,
330
       storedanything_id = 5, integer_id = 6 };
331
 
332
/* Return a new variable info structure for a variable with tree T
   and name NAME.  Append it to the vector of variable info
   structures.  */
335
 
336
static varinfo_t
337
new_var_info (tree t, const char *name)
338
{
339
  unsigned index = VEC_length (varinfo_t, varmap);
340
  varinfo_t ret = (varinfo_t) pool_alloc (variable_info_pool);
341
 
342
  ret->id = index;
343
  ret->name = name;
344
  ret->decl = t;
345
  /* Vars without decl are artificial and do not have sub-variables.  */
346
  ret->is_artificial_var = (t == NULL_TREE);
347
  ret->is_special_var = false;
348
  ret->is_unknown_size_var = false;
349
  ret->is_full_var = (t == NULL_TREE);
350
  ret->is_heap_var = false;
351
  ret->may_have_pointers = true;
352
  ret->only_restrict_pointers = false;
353
  ret->is_global_var = (t == NULL_TREE);
354
  ret->is_fn_info = false;
355
  if (t && DECL_P (t))
356
    ret->is_global_var = (is_global_var (t)
357
                          /* We have to treat even local register variables
358
                             as escape points.  */
359
                          || (TREE_CODE (t) == VAR_DECL
360
                              && DECL_HARD_REGISTER (t)));
361
  ret->solution = BITMAP_ALLOC (&pta_obstack);
362
  ret->oldsolution = NULL;
363
  ret->next = NULL;
364
 
365
  stats.total_vars++;
366
 
367
  VEC_safe_push (varinfo_t, heap, varmap, ret);
368
 
369
  return ret;
370
}
371
 
372
 
373
/* A map mapping call statements to per-stmt variables for uses
374
   and clobbers specific to the call.  */
375
struct pointer_map_t *call_stmt_vars;
376
 
377
/* Lookup or create the variable for the call statement CALL.  */
378
 
379
static varinfo_t
380
get_call_vi (gimple call)
381
{
382
  void **slot_p;
383
  varinfo_t vi, vi2;
384
 
385
  slot_p = pointer_map_insert (call_stmt_vars, call);
386
  if (*slot_p)
387
    return (varinfo_t) *slot_p;
388
 
389
  vi = new_var_info (NULL_TREE, "CALLUSED");
390
  vi->offset = 0;
391
  vi->size = 1;
392
  vi->fullsize = 2;
393
  vi->is_full_var = true;
394
 
395
  vi->next = vi2 = new_var_info (NULL_TREE, "CALLCLOBBERED");
396
  vi2->offset = 1;
397
  vi2->size = 1;
398
  vi2->fullsize = 2;
399
  vi2->is_full_var = true;
400
 
401
  *slot_p = (void *) vi;
402
  return vi;
403
}
404
 
405
/* Lookup the variable for the call statement CALL representing
406
   the uses.  Returns NULL if there is nothing special about this call.  */
407
 
408
static varinfo_t
409
lookup_call_use_vi (gimple call)
410
{
411
  void **slot_p;
412
 
413
  slot_p = pointer_map_contains (call_stmt_vars, call);
414
  if (slot_p)
415
    return (varinfo_t) *slot_p;
416
 
417
  return NULL;
418
}
419
 
420
/* Lookup the variable for the call statement CALL representing
421
   the clobbers.  Returns NULL if there is nothing special about this call.  */
422
 
423
static varinfo_t
424
lookup_call_clobber_vi (gimple call)
425
{
426
  varinfo_t uses = lookup_call_use_vi (call);
427
  if (!uses)
428
    return NULL;
429
 
430
  return uses->next;
431
}
432
 
433
/* Lookup or create the variable for the call statement CALL representing
434
   the uses.  */
435
 
436
static varinfo_t
437
get_call_use_vi (gimple call)
438
{
439
  return get_call_vi (call);
440
}
441
 
442
/* Lookup or create the variable for the call statement CALL representing
443
   the clobbers.  */
444
 
445
static varinfo_t ATTRIBUTE_UNUSED
446
get_call_clobber_vi (gimple call)
447
{
448
  return get_call_vi (call)->next;
449
}
450
 
451
 
452
typedef enum {SCALAR, DEREF, ADDRESSOF} constraint_expr_type;
453
 
454
/* An expression that appears in a constraint.  */
455
 
456
struct constraint_expr
457
{
458
  /* Constraint type.  */
459
  constraint_expr_type type;
460
 
461
  /* Variable we are referring to in the constraint.  */
462
  unsigned int var;
463
 
464
  /* Offset, in bits, of this constraint from the beginning of
465
     variables it ends up referring to.
466
 
467
     IOW, in a deref constraint, we would deref, get the result set,
468
     then add OFFSET to each member.   */
469
  HOST_WIDE_INT offset;
470
};
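
/* For example (an illustrative sketch, not dumped output): using the
   struct f layout described at the top of this file, a load x = y->b
   where y points to foo is represented as the constraint x = *y + 32.
   Processing it dereferences Sol(y) and applies the 32-bit offset to
   each member, so if Sol(y) contains foo.a (the field variable at
   offset 0), the offsetted lookup selects foo.b, the field at offset
   32.  */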
 
472
/* Use 0x8000... as special unknown offset.  */
473
#define UNKNOWN_OFFSET ((HOST_WIDE_INT)-1 << (HOST_BITS_PER_WIDE_INT-1))
474
 
475
typedef struct constraint_expr ce_s;
476
DEF_VEC_O(ce_s);
477
DEF_VEC_ALLOC_O(ce_s, heap);
478
static void get_constraint_for_1 (tree, VEC(ce_s, heap) **, bool, bool);
479
static void get_constraint_for (tree, VEC(ce_s, heap) **);
480
static void get_constraint_for_rhs (tree, VEC(ce_s, heap) **);
481
static void do_deref (VEC (ce_s, heap) **);
482
 
483
/* Our set constraints are made up of two constraint expressions, one
484
   LHS, and one RHS.
485
 
486
   As described in the introduction, our set constraints each represent an
487
   operation between set valued variables.
488
*/
489
struct constraint
490
{
491
  struct constraint_expr lhs;
492
  struct constraint_expr rhs;
493
};
494
 
495
/* List of constraints that we use to build the constraint graph from.  */
496
 
497
static VEC(constraint_t,heap) *constraints;
498
static alloc_pool constraint_pool;
499
 
500
/* The constraint graph is represented as an array of bitmaps
501
   containing successor nodes.  */
502
 
503
struct constraint_graph
504
{
505
  /* Size of this graph, which may be different than the number of
506
     nodes in the variable map.  */
507
  unsigned int size;
508
 
509
  /* Explicit successors of each node. */
510
  bitmap *succs;
511
 
512
  /* Implicit predecessors of each node (Used for variable
513
     substitution). */
514
  bitmap *implicit_preds;
515
 
516
  /* Explicit predecessors of each node (Used for variable substitution).  */
517
  bitmap *preds;
518
 
519
  /* Indirect cycle representatives, or -1 if the node has no indirect
520
     cycles.  */
521
  int *indirect_cycles;
522
 
523
  /* Representative node for a node.  rep[a] == a unless the node has
524
     been unified. */
525
  unsigned int *rep;
526
 
527
  /* Equivalence class representative for a label.  This is used for
528
     variable substitution.  */
529
  int *eq_rep;
530
 
531
  /* Pointer equivalence label for a node.  All nodes with the same
532
     pointer equivalence label can be unified together at some point
533
     (either during constraint optimization or after the constraint
534
     graph is built).  */
535
  unsigned int *pe;
536
 
537
  /* Pointer equivalence representative for a label.  This is used to
538
     handle nodes that are pointer equivalent but not location
539
     equivalent.  We can unite these once the addressof constraints
540
     are transformed into initial points-to sets.  */
541
  int *pe_rep;
542
 
543
  /* Pointer equivalence label for each node, used during variable
544
     substitution.  */
545
  unsigned int *pointer_label;
546
 
547
  /* Location equivalence label for each node, used during location
548
     equivalence finding.  */
549
  unsigned int *loc_label;
550
 
551
  /* Pointed-by set for each node, used during location equivalence
552
     finding.  This is pointed-by rather than pointed-to, because it
553
     is constructed using the predecessor graph.  */
554
  bitmap *pointed_by;
555
 
556
  /* Points to sets for pointer equivalence.  This is *not* the actual
557
     points-to sets for nodes.  */
558
  bitmap *points_to;
559
 
560
  /* Bitmap of nodes where the bit is set if the node is a direct
561
     node.  Used for variable substitution.  */
562
  sbitmap direct_nodes;
563
 
564
  /* Bitmap of nodes where the bit is set if the node is address
565
     taken.  Used for variable substitution.  */
566
  bitmap address_taken;
567
 
568
  /* Vector of complex constraints for each graph node.  Complex
569
     constraints are those involving dereferences or offsets that are
570
     not 0.  */
571
  VEC(constraint_t,heap) **complex;
572
};
573
 
574
static constraint_graph_t graph;
575
 
576
/* During variable substitution and the offline version of indirect
577
   cycle finding, we create nodes to represent dereferences and
578
   address taken constraints.  These represent where these start and
579
   end.  */
580
#define FIRST_REF_NODE (VEC_length (varinfo_t, varmap))
581
#define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
582
 
583
/* Return the representative node for NODE, if NODE has been unioned
584
   with another NODE.
585
   This function performs path compression along the way to finding
586
   the representative.  */
587
 
588
static unsigned int
589
find (unsigned int node)
590
{
591
  gcc_assert (node < graph->size);
592
  if (graph->rep[node] != node)
593
    return graph->rep[node] = find (graph->rep[node]);
594
  return node;
595
}
596
 
597
/* Union the TO and FROM nodes to the TO nodes.
598
   Note that at some point in the future, we may want to do
599
   union-by-rank, in which case we are going to have to return the
600
   node we unified to.  */
601
 
602
static bool
603
unite (unsigned int to, unsigned int from)
604
{
605
  gcc_assert (to < graph->size && from < graph->size);
606
  if (to != from && graph->rep[from] != to)
607
    {
608
      graph->rep[from] = to;
609
      return true;
610
    }
611
  return false;
612
}
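
/* A small worked example of the two routines above (illustrative only):
   with four nodes that initially represent themselves,

     unite (1, 2);     rep = { 0, 1, 1, 3 }
     unite (1, 3);     rep = { 0, 1, 1, 1 }
     find (2);         returns 1

   and if a chain rep[3] = 2, rep[2] = 1 had been built instead, a call
   to find (3) would return 1 and, by path compression, rewrite rep[3]
   to 1 so that later lookups are cheap.  */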
 
614
/* Create a new constraint consisting of LHS and RHS expressions.  */
615
 
616
static constraint_t
617
new_constraint (const struct constraint_expr lhs,
618
                const struct constraint_expr rhs)
619
{
620
  constraint_t ret = (constraint_t) pool_alloc (constraint_pool);
621
  ret->lhs = lhs;
622
  ret->rhs = rhs;
623
  return ret;
624
}
625
 
626
/* Print out constraint C to FILE.  */
627
 
628
static void
629
dump_constraint (FILE *file, constraint_t c)
630
{
631
  if (c->lhs.type == ADDRESSOF)
632
    fprintf (file, "&");
633
  else if (c->lhs.type == DEREF)
634
    fprintf (file, "*");
635
  fprintf (file, "%s", get_varinfo (c->lhs.var)->name);
636
  if (c->lhs.offset == UNKNOWN_OFFSET)
637
    fprintf (file, " + UNKNOWN");
638
  else if (c->lhs.offset != 0)
639
    fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
640
  fprintf (file, " = ");
641
  if (c->rhs.type == ADDRESSOF)
642
    fprintf (file, "&");
643
  else if (c->rhs.type == DEREF)
644
    fprintf (file, "*");
645
  fprintf (file, "%s", get_varinfo (c->rhs.var)->name);
646
  if (c->rhs.offset == UNKNOWN_OFFSET)
647
    fprintf (file, " + UNKNOWN");
648
  else if (c->rhs.offset != 0)
649
    fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
650
}
651
 
652
 
653
void debug_constraint (constraint_t);
654
void debug_constraints (void);
655
void debug_constraint_graph (void);
656
void debug_solution_for_var (unsigned int);
657
void debug_sa_points_to_info (void);
658
 
659
/* Print out constraint C to stderr.  */
660
 
661
DEBUG_FUNCTION void
662
debug_constraint (constraint_t c)
663
{
664
  dump_constraint (stderr, c);
665
  fprintf (stderr, "\n");
666
}
667
 
668
/* Print out all constraints to FILE */
669
 
670
static void
671
dump_constraints (FILE *file, int from)
672
{
673
  int i;
674
  constraint_t c;
675
  for (i = from; VEC_iterate (constraint_t, constraints, i, c); i++)
676
    if (c)
677
      {
678
        dump_constraint (file, c);
679
        fprintf (file, "\n");
680
      }
681
}
682
 
683
/* Print out all constraints to stderr.  */
684
 
685
DEBUG_FUNCTION void
686
debug_constraints (void)
687
{
688
  dump_constraints (stderr, 0);
689
}
690
 
691
/* Print the constraint graph in dot format.  */
692
 
693
static void
694
dump_constraint_graph (FILE *file)
695
{
696
  unsigned int i;
697
 
698
  /* Only print the graph if it has already been initialized:  */
699
  if (!graph)
700
    return;
701
 
702
  /* Prints the header of the dot file:  */
703
  fprintf (file, "strict digraph {\n");
704
  fprintf (file, "  node [\n    shape = box\n  ]\n");
705
  fprintf (file, "  edge [\n    fontsize = \"12\"\n  ]\n");
706
  fprintf (file, "\n  // List of nodes and complex constraints in "
707
           "the constraint graph:\n");
708
 
709
  /* The next lines print the nodes in the graph together with the
710
     complex constraints attached to them.  */
711
  for (i = 0; i < graph->size; i++)
712
    {
713
      if (find (i) != i)
714
        continue;
715
      if (i < FIRST_REF_NODE)
716
        fprintf (file, "\"%s\"", get_varinfo (i)->name);
717
      else
718
        fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
719
      if (graph->complex[i])
720
        {
721
          unsigned j;
722
          constraint_t c;
723
          fprintf (file, " [label=\"\\N\\n");
724
          for (j = 0; VEC_iterate (constraint_t, graph->complex[i], j, c); ++j)
725
            {
726
              dump_constraint (file, c);
727
              fprintf (file, "\\l");
728
            }
729
          fprintf (file, "\"]");
730
        }
731
      fprintf (file, ";\n");
732
    }
733
 
734
  /* Go over the edges.  */
735
  fprintf (file, "\n  // Edges in the constraint graph:\n");
736
  for (i = 0; i < graph->size; i++)
737
    {
738
      unsigned j;
739
      bitmap_iterator bi;
740
      if (find (i) != i)
741
        continue;
742
      EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i], 0, j, bi)
743
        {
744
          unsigned to = find (j);
745
          if (i == to)
746
            continue;
747
          if (i < FIRST_REF_NODE)
748
            fprintf (file, "\"%s\"", get_varinfo (i)->name);
749
          else
750
            fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
751
          fprintf (file, " -> ");
752
          if (to < FIRST_REF_NODE)
753
            fprintf (file, "\"%s\"", get_varinfo (to)->name);
754
          else
755
            fprintf (file, "\"*%s\"", get_varinfo (to - FIRST_REF_NODE)->name);
756
          fprintf (file, ";\n");
757
        }
758
    }
759
 
760
  /* Prints the tail of the dot file.  */
761
  fprintf (file, "}\n");
762
}
763
 
764
/* Print out the constraint graph to stderr.  */
765
 
766
DEBUG_FUNCTION void
767
debug_constraint_graph (void)
768
{
769
  dump_constraint_graph (stderr);
770
}
771
 
/* SOLVER FUNCTIONS

   The solver is a simple worklist solver that works on the following
   algorithm:

   sbitmap changed_nodes = all zeroes;
   changed_count = 0;
   For each node that is not already collapsed:
       changed_count++;
       set bit in changed nodes

   while (changed_count > 0)
   {
     compute topological ordering for constraint graph

     find and collapse cycles in the constraint graph (updating
     changed if necessary)

     for each node (n) in the graph in topological order:
       changed_count--;

       Process each complex constraint associated with the node,
       updating changed if necessary.

       For each outgoing edge from n, propagate the solution from n to
       the destination of the edge, updating changed as necessary.

   }  */
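
/* A minimal, self-contained sketch of the propagation step above, using
   plain unsigned masks for the solution sets and a dense adjacency
   matrix for the copy edges (illustrative only; the names below are made
   up and the real solver uses bitmaps, a topological order, cycle
   collapsing and complex constraints):

     #include <stdio.h>

     #define N 3                        // nodes: 0 = p, 1 = q, 2 = r

     int main (void)
     {
       unsigned sol[N] = { 0x1, 0, 0 };      // Sol(p) = { a }
       int copy[N][N]  = { { 0, 1, 0 },      // copy edge p -> q
                           { 0, 0, 1 },      // copy edge q -> r
                           { 0, 0, 0 } };
       int changed = 1;

       while (changed)                  // iterate to a fixed point
         {
           changed = 0;
           for (int n = 0; n < N; n++)
             for (int m = 0; m < N; m++)
               if (copy[n][m] && (sol[m] | sol[n]) != sol[m])
                 {
                   sol[m] |= sol[n];    // Sol(m) <- Sol(m) U Sol(n)
                   changed = 1;
                 }
         }

       for (int n = 0; n < N; n++)
         printf ("Sol(%d) = 0x%x\n", n, sol[n]);
       return 0;
     }

   After the loop all three solutions contain { a }, mirroring how the
   copy edges p -> q -> r carry the points-to set forward.  */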
 
801
/* Return true if two constraint expressions A and B are equal.  */
802
 
803
static bool
804
constraint_expr_equal (struct constraint_expr a, struct constraint_expr b)
805
{
806
  return a.type == b.type && a.var == b.var && a.offset == b.offset;
807
}
808
 
809
/* Return true if constraint expression A is less than constraint expression
810
   B.  This is just arbitrary, but consistent, in order to give them an
811
   ordering.  */
812
 
813
static bool
814
constraint_expr_less (struct constraint_expr a, struct constraint_expr b)
815
{
816
  if (a.type == b.type)
817
    {
818
      if (a.var == b.var)
819
        return a.offset < b.offset;
820
      else
821
        return a.var < b.var;
822
    }
823
  else
824
    return a.type < b.type;
825
}
826
 
827
/* Return true if constraint A is less than constraint B.  This is just
828
   arbitrary, but consistent, in order to give them an ordering.  */
829
 
830
static bool
831
constraint_less (const constraint_t a, const constraint_t b)
832
{
833
  if (constraint_expr_less (a->lhs, b->lhs))
834
    return true;
835
  else if (constraint_expr_less (b->lhs, a->lhs))
836
    return false;
837
  else
838
    return constraint_expr_less (a->rhs, b->rhs);
839
}
840
 
841
/* Return true if two constraints A and B are equal.  */
842
 
843
static bool
844
constraint_equal (struct constraint a, struct constraint b)
845
{
846
  return constraint_expr_equal (a.lhs, b.lhs)
847
    && constraint_expr_equal (a.rhs, b.rhs);
848
}
849
 
850
 
851
/* Find a constraint LOOKFOR in the sorted constraint vector VEC */
852
 
853
static constraint_t
854
constraint_vec_find (VEC(constraint_t,heap) *vec,
855
                     struct constraint lookfor)
856
{
857
  unsigned int place;
858
  constraint_t found;
859
 
860
  if (vec == NULL)
861
    return NULL;
862
 
863
  place = VEC_lower_bound (constraint_t, vec, &lookfor, constraint_less);
864
  if (place >= VEC_length (constraint_t, vec))
865
    return NULL;
866
  found = VEC_index (constraint_t, vec, place);
867
  if (!constraint_equal (*found, lookfor))
868
    return NULL;
869
  return found;
870
}
871
 
872
/* Union two constraint vectors, TO and FROM.  Put the result in TO.  */
873
 
874
static void
875
constraint_set_union (VEC(constraint_t,heap) **to,
876
                      VEC(constraint_t,heap) **from)
877
{
878
  int i;
879
  constraint_t c;
880
 
881
  FOR_EACH_VEC_ELT (constraint_t, *from, i, c)
882
    {
883
      if (constraint_vec_find (*to, *c) == NULL)
884
        {
885
          unsigned int place = VEC_lower_bound (constraint_t, *to, c,
886
                                                constraint_less);
887
          VEC_safe_insert (constraint_t, heap, *to, place, c);
888
        }
889
    }
890
}
891
 
892
/* Expands the solution in SET to all sub-fields of variables included.
893
   Union the expanded result into RESULT.  */
894
 
895
static void
896
solution_set_expand (bitmap result, bitmap set)
897
{
898
  bitmap_iterator bi;
899
  bitmap vars = NULL;
900
  unsigned j;
901
 
902
  /* In a first pass record all variables whose sub-fields we need to
     add.  This avoids quadratic behavior.  */
904
  EXECUTE_IF_SET_IN_BITMAP (set, 0, j, bi)
905
    {
906
      varinfo_t v = get_varinfo (j);
907
      if (v->is_artificial_var
908
          || v->is_full_var)
909
        continue;
910
      v = lookup_vi_for_tree (v->decl);
911
      if (vars == NULL)
912
        vars = BITMAP_ALLOC (NULL);
913
      bitmap_set_bit (vars, v->id);
914
    }
915
 
916
  /* In the second pass now do the addition to the solution and
917
     to speed up solving add it to the delta as well.  */
918
  if (vars != NULL)
919
    {
920
      EXECUTE_IF_SET_IN_BITMAP (vars, 0, j, bi)
921
        {
922
          varinfo_t v = get_varinfo (j);
923
          for (; v != NULL; v = v->next)
924
            bitmap_set_bit (result, v->id);
925
        }
926
      BITMAP_FREE (vars);
927
    }
928
}
929
 
930
/* Take a solution set SET, add OFFSET to each member of the set, and
931
   overwrite SET with the result when done.  */
932
 
933
static void
934
solution_set_add (bitmap set, HOST_WIDE_INT offset)
935
{
936
  bitmap result = BITMAP_ALLOC (&iteration_obstack);
937
  unsigned int i;
938
  bitmap_iterator bi;
939
 
940
  /* If the offset is unknown we have to expand the solution to
941
     all subfields.  */
942
  if (offset == UNKNOWN_OFFSET)
943
    {
944
      solution_set_expand (set, set);
945
      return;
946
    }
947
 
948
  EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
949
    {
950
      varinfo_t vi = get_varinfo (i);
951
 
952
      /* If this is a variable with just one field just set its bit
953
         in the result.  */
954
      if (vi->is_artificial_var
955
          || vi->is_unknown_size_var
956
          || vi->is_full_var)
957
        bitmap_set_bit (result, i);
958
      else
959
        {
960
          unsigned HOST_WIDE_INT fieldoffset = vi->offset + offset;
961
 
962
          /* If the offset makes the pointer point to before the
963
             variable use offset zero for the field lookup.  */
964
          if (offset < 0
965
              && fieldoffset > vi->offset)
966
            fieldoffset = 0;
967
 
968
          if (offset != 0)
969
            vi = first_or_preceding_vi_for_offset (vi, fieldoffset);
970
 
971
          bitmap_set_bit (result, vi->id);
972
          /* If the result is not exactly at fieldoffset include the next
973
             field as well.  See get_constraint_for_ptr_offset for more
974
             rationale.  */
975
          if (vi->offset != fieldoffset
976
              && vi->next != NULL)
977
            bitmap_set_bit (result, vi->next->id);
978
        }
979
    }
980
 
981
  bitmap_copy (set, result);
982
  BITMAP_FREE (result);
983
}
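
/* For example (illustrative only), with the struct f layout from the
   head of this file: solution_set_add ({foo.a}, 32) yields {foo.b},
   since the field lookup lands on the field at offset 32, while adding
   UNKNOWN_OFFSET instead expands the set to all fields of foo, i.e.
   {foo.a, foo.b}.  A negative offset that would point before the
   variable falls back to a field lookup at offset zero.  */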
 
985
/* Union solution sets TO and FROM, and add INC to each member of FROM in the
986
   process.  */
987
 
988
static bool
989
set_union_with_increment  (bitmap to, bitmap from, HOST_WIDE_INT inc)
990
{
991
  if (inc == 0)
992
    return bitmap_ior_into (to, from);
993
  else
994
    {
995
      bitmap tmp;
996
      bool res;
997
 
998
      tmp = BITMAP_ALLOC (&iteration_obstack);
999
      bitmap_copy (tmp, from);
1000
      solution_set_add (tmp, inc);
1001
      res = bitmap_ior_into (to, tmp);
1002
      BITMAP_FREE (tmp);
1003
      return res;
1004
    }
1005
}
1006
 
1007
/* Insert constraint C into the list of complex constraints for graph
1008
   node VAR.  */
1009
 
1010
static void
1011
insert_into_complex (constraint_graph_t graph,
1012
                     unsigned int var, constraint_t c)
1013
{
1014
  VEC (constraint_t, heap) *complex = graph->complex[var];
1015
  unsigned int place = VEC_lower_bound (constraint_t, complex, c,
1016
                                        constraint_less);
1017
 
1018
  /* Only insert constraints that do not already exist.  */
1019
  if (place >= VEC_length (constraint_t, complex)
1020
      || !constraint_equal (*c, *VEC_index (constraint_t, complex, place)))
1021
    VEC_safe_insert (constraint_t, heap, graph->complex[var], place, c);
1022
}
1023
 
1024
 
1025
/* Condense two variable nodes into a single variable node, by moving
1026
   all associated info from SRC to TO.  */
1027
 
1028
static void
1029
merge_node_constraints (constraint_graph_t graph, unsigned int to,
1030
                        unsigned int from)
1031
{
1032
  unsigned int i;
1033
  constraint_t c;
1034
 
1035
  gcc_assert (find (from) == to);
1036
 
1037
  /* Move all complex constraints from src node into to node  */
1038
  FOR_EACH_VEC_ELT (constraint_t, graph->complex[from], i, c)
1039
    {
1040
      /* In complex constraints for node src, we may have either
         a = *src or *src = a, or an offsetted constraint; these are
         always added to the rhs node's constraints.  */
1043
 
1044
      if (c->rhs.type == DEREF)
1045
        c->rhs.var = to;
1046
      else if (c->lhs.type == DEREF)
1047
        c->lhs.var = to;
1048
      else
1049
        c->rhs.var = to;
1050
    }
1051
  constraint_set_union (&graph->complex[to], &graph->complex[from]);
1052
  VEC_free (constraint_t, heap, graph->complex[from]);
1053
  graph->complex[from] = NULL;
1054
}
1055
 
1056
 
1057
/* Remove edges involving NODE from GRAPH.  */
1058
 
1059
static void
1060
clear_edges_for_node (constraint_graph_t graph, unsigned int node)
1061
{
1062
  if (graph->succs[node])
1063
    BITMAP_FREE (graph->succs[node]);
1064
}
1065
 
1066
/* Merge GRAPH nodes FROM and TO into node TO.  */
1067
 
1068
static void
1069
merge_graph_nodes (constraint_graph_t graph, unsigned int to,
1070
                   unsigned int from)
1071
{
1072
  if (graph->indirect_cycles[from] != -1)
1073
    {
1074
      /* If we have indirect cycles with the from node, and we have
1075
         none on the to node, the to node has indirect cycles from the
1076
         from node now that they are unified.
1077
         If indirect cycles exist on both, unify the nodes that they
1078
         are in a cycle with, since we know they are in a cycle with
1079
         each other.  */
1080
      if (graph->indirect_cycles[to] == -1)
1081
        graph->indirect_cycles[to] = graph->indirect_cycles[from];
1082
    }
1083
 
1084
  /* Merge all the successor edges.  */
1085
  if (graph->succs[from])
1086
    {
1087
      if (!graph->succs[to])
1088
        graph->succs[to] = BITMAP_ALLOC (&pta_obstack);
1089
      bitmap_ior_into (graph->succs[to],
1090
                       graph->succs[from]);
1091
    }
1092
 
1093
  clear_edges_for_node (graph, from);
1094
}
1095
 
1096
 
1097
/* Add an implicit predecessor edge to GRAPH, going from TO to FROM if
   it doesn't exist in the graph already.  */
1099
 
1100
static void
1101
add_implicit_graph_edge (constraint_graph_t graph, unsigned int to,
1102
                         unsigned int from)
1103
{
1104
  if (to == from)
1105
    return;
1106
 
1107
  if (!graph->implicit_preds[to])
1108
    graph->implicit_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
1109
 
1110
  if (bitmap_set_bit (graph->implicit_preds[to], from))
1111
    stats.num_implicit_edges++;
1112
}
1113
 
1114
/* Add a predecessor graph edge to GRAPH, going from TO to FROM if
   it doesn't exist in the graph already.  */
1117
 
1118
static void
1119
add_pred_graph_edge (constraint_graph_t graph, unsigned int to,
1120
                     unsigned int from)
1121
{
1122
  if (!graph->preds[to])
1123
    graph->preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
1124
  bitmap_set_bit (graph->preds[to], from);
1125
}
1126
 
1127
/* Add a graph edge to GRAPH, going from FROM to TO if
1128
   it doesn't exist in the graph already.
1129
   Return false if the edge already existed, true otherwise.  */
1130
 
1131
static bool
1132
add_graph_edge (constraint_graph_t graph, unsigned int to,
1133
                unsigned int from)
1134
{
1135
  if (to == from)
1136
    {
1137
      return false;
1138
    }
1139
  else
1140
    {
1141
      bool r = false;
1142
 
1143
      if (!graph->succs[from])
1144
        graph->succs[from] = BITMAP_ALLOC (&pta_obstack);
1145
      if (bitmap_set_bit (graph->succs[from], to))
1146
        {
1147
          r = true;
1148
          if (to < FIRST_REF_NODE && from < FIRST_REF_NODE)
1149
            stats.num_edges++;
1150
        }
1151
      return r;
1152
    }
1153
}
1154
 
1155
 
1156
/* Return true if {DEST.SRC} is an existing graph edge in GRAPH.  */
1157
 
1158
static bool
1159
valid_graph_edge (constraint_graph_t graph, unsigned int src,
1160
                  unsigned int dest)
1161
{
1162
  return (graph->succs[dest]
1163
          && bitmap_bit_p (graph->succs[dest], src));
1164
}
1165
 
1166
/* Initialize the constraint graph structure to contain SIZE nodes.  */
1167
 
1168
static void
1169
init_graph (unsigned int size)
1170
{
1171
  unsigned int j;
1172
 
1173
  graph = XCNEW (struct constraint_graph);
1174
  graph->size = size;
1175
  graph->succs = XCNEWVEC (bitmap, graph->size);
1176
  graph->indirect_cycles = XNEWVEC (int, graph->size);
1177
  graph->rep = XNEWVEC (unsigned int, graph->size);
1178
  graph->complex = XCNEWVEC (VEC(constraint_t, heap) *, size);
1179
  graph->pe = XCNEWVEC (unsigned int, graph->size);
1180
  graph->pe_rep = XNEWVEC (int, graph->size);
1181
 
1182
  for (j = 0; j < graph->size; j++)
1183
    {
1184
      graph->rep[j] = j;
1185
      graph->pe_rep[j] = -1;
1186
      graph->indirect_cycles[j] = -1;
1187
    }
1188
}
1189
 
1190
/* Build the constraint graph, adding only predecessor edges right now.  */
1191
 
1192
static void
1193
build_pred_graph (void)
1194
{
1195
  int i;
1196
  constraint_t c;
1197
  unsigned int j;
1198
 
1199
  graph->implicit_preds = XCNEWVEC (bitmap, graph->size);
1200
  graph->preds = XCNEWVEC (bitmap, graph->size);
1201
  graph->pointer_label = XCNEWVEC (unsigned int, graph->size);
1202
  graph->loc_label = XCNEWVEC (unsigned int, graph->size);
1203
  graph->pointed_by = XCNEWVEC (bitmap, graph->size);
1204
  graph->points_to = XCNEWVEC (bitmap, graph->size);
1205
  graph->eq_rep = XNEWVEC (int, graph->size);
1206
  graph->direct_nodes = sbitmap_alloc (graph->size);
1207
  graph->address_taken = BITMAP_ALLOC (&predbitmap_obstack);
1208
  sbitmap_zero (graph->direct_nodes);
1209
 
1210
  for (j = 0; j < FIRST_REF_NODE; j++)
1211
    {
1212
      if (!get_varinfo (j)->is_special_var)
1213
        SET_BIT (graph->direct_nodes, j);
1214
    }
1215
 
1216
  for (j = 0; j < graph->size; j++)
1217
    graph->eq_rep[j] = -1;
1218
 
1219
  for (j = 0; j < VEC_length (varinfo_t, varmap); j++)
1220
    graph->indirect_cycles[j] = -1;
1221
 
1222
  FOR_EACH_VEC_ELT (constraint_t, constraints, i, c)
1223
    {
1224
      struct constraint_expr lhs = c->lhs;
1225
      struct constraint_expr rhs = c->rhs;
1226
      unsigned int lhsvar = lhs.var;
1227
      unsigned int rhsvar = rhs.var;
1228
 
1229
      if (lhs.type == DEREF)
1230
        {
1231
          /* *x = y.  */
1232
          if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1233
            add_pred_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1234
        }
1235
      else if (rhs.type == DEREF)
1236
        {
1237
          /* x = *y */
1238
          if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1239
            add_pred_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1240
          else
1241
            RESET_BIT (graph->direct_nodes, lhsvar);
1242
        }
1243
      else if (rhs.type == ADDRESSOF)
1244
        {
1245
          varinfo_t v;
1246
 
1247
          /* x = &y */
1248
          if (graph->points_to[lhsvar] == NULL)
1249
            graph->points_to[lhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
1250
          bitmap_set_bit (graph->points_to[lhsvar], rhsvar);
1251
 
1252
          if (graph->pointed_by[rhsvar] == NULL)
1253
            graph->pointed_by[rhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
1254
          bitmap_set_bit (graph->pointed_by[rhsvar], lhsvar);
1255
 
1256
          /* Implicitly, *x = y */
1257
          add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1258
 
1259
          /* All related variables are no longer direct nodes.  */
1260
          RESET_BIT (graph->direct_nodes, rhsvar);
1261
          v = get_varinfo (rhsvar);
1262
          if (!v->is_full_var)
1263
            {
1264
              v = lookup_vi_for_tree (v->decl);
1265
              do
1266
                {
1267
                  RESET_BIT (graph->direct_nodes, v->id);
1268
                  v = v->next;
1269
                }
1270
              while (v != NULL);
1271
            }
1272
          bitmap_set_bit (graph->address_taken, rhsvar);
1273
        }
1274
      else if (lhsvar > anything_id
1275
               && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1276
        {
1277
          /* x = y */
1278
          add_pred_graph_edge (graph, lhsvar, rhsvar);
1279
          /* Implicitly, *x = *y */
1280
          add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar,
1281
                                   FIRST_REF_NODE + rhsvar);
1282
        }
1283
      else if (lhs.offset != 0 || rhs.offset != 0)
1284
        {
1285
          if (rhs.offset != 0)
1286
            RESET_BIT (graph->direct_nodes, lhs.var);
1287
          else if (lhs.offset != 0)
1288
            RESET_BIT (graph->direct_nodes, rhs.var);
1289
        }
1290
    }
1291
}
1292
 
1293
/* Build the constraint graph, adding successor edges.  */
1294
 
1295
static void
1296
build_succ_graph (void)
1297
{
1298
  unsigned i, t;
1299
  constraint_t c;
1300
 
1301
  FOR_EACH_VEC_ELT (constraint_t, constraints, i, c)
1302
    {
1303
      struct constraint_expr lhs;
1304
      struct constraint_expr rhs;
1305
      unsigned int lhsvar;
1306
      unsigned int rhsvar;
1307
 
1308
      if (!c)
1309
        continue;
1310
 
1311
      lhs = c->lhs;
1312
      rhs = c->rhs;
1313
      lhsvar = find (lhs.var);
1314
      rhsvar = find (rhs.var);
1315
 
1316
      if (lhs.type == DEREF)
1317
        {
1318
          if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1319
            add_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1320
        }
1321
      else if (rhs.type == DEREF)
1322
        {
1323
          if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1324
            add_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1325
        }
1326
      else if (rhs.type == ADDRESSOF)
1327
        {
1328
          /* x = &y */
1329
          gcc_assert (find (rhs.var) == rhs.var);
1330
          bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
1331
        }
1332
      else if (lhsvar > anything_id
1333
               && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1334
        {
1335
          add_graph_edge (graph, lhsvar, rhsvar);
1336
        }
1337
    }
1338
 
1339
  /* Add edges from STOREDANYTHING to all non-direct nodes that can
1340
     receive pointers.  */
1341
  t = find (storedanything_id);
1342
  for (i = integer_id + 1; i < FIRST_REF_NODE; ++i)
1343
    {
1344
      if (!TEST_BIT (graph->direct_nodes, i)
1345
          && get_varinfo (i)->may_have_pointers)
1346
        add_graph_edge (graph, find (i), t);
1347
    }
1348
 
1349
  /* Everything stored to ANYTHING also potentially escapes.  */
1350
  add_graph_edge (graph, find (escaped_id), t);
1351
}
1352
 
1353
 
1354
/* Changed variables on the last iteration.  */
1355
static bitmap changed;
1356
 
1357
/* Strongly Connected Component visitation info.  */
1358
 
1359
struct scc_info
1360
{
1361
  sbitmap visited;
1362
  sbitmap deleted;
1363
  unsigned int *dfs;
1364
  unsigned int *node_mapping;
1365
  int current_index;
1366
  VEC(unsigned,heap) *scc_stack;
1367
};
1368
 
1369
 
1370
/* Recursive routine to find strongly connected components in GRAPH.
1371
   SI is the SCC info to store the information in, and N is the id of current
1372
   graph node we are processing.
1373
 
1374
   This is Tarjan's strongly connected component finding algorithm, as
1375
   modified by Nuutila to keep only non-root nodes on the stack.
1376
   The algorithm can be found in "On finding the strongly connected
   components in a directed graph" by Esko Nuutila and Eljas
   Soisalon-Soininen, in Information Processing Letters volume 49,
   number 1, pages 9-14.  */
1380
 
1381
static void
1382
scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1383
{
1384
  unsigned int i;
1385
  bitmap_iterator bi;
1386
  unsigned int my_dfs;
1387
 
1388
  SET_BIT (si->visited, n);
1389
  si->dfs[n] = si->current_index ++;
1390
  my_dfs = si->dfs[n];
1391
 
1392
  /* Visit all the successors.  */
1393
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi)
1394
    {
1395
      unsigned int w;
1396
 
1397
      if (i > LAST_REF_NODE)
1398
        break;
1399
 
1400
      w = find (i);
1401
      if (TEST_BIT (si->deleted, w))
1402
        continue;
1403
 
1404
      if (!TEST_BIT (si->visited, w))
1405
        scc_visit (graph, si, w);
1406
      {
1407
        unsigned int t = find (w);
1408
        unsigned int nnode = find (n);
1409
        gcc_assert (nnode == n);
1410
 
1411
        if (si->dfs[t] < si->dfs[nnode])
1412
          si->dfs[n] = si->dfs[t];
1413
      }
1414
    }
1415
 
1416
  /* See if any components have been identified.  */
1417
  if (si->dfs[n] == my_dfs)
1418
    {
1419
      if (VEC_length (unsigned, si->scc_stack) > 0
1420
          && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
1421
        {
1422
          bitmap scc = BITMAP_ALLOC (NULL);
1423
          unsigned int lowest_node;
1424
          bitmap_iterator bi;
1425
 
1426
          bitmap_set_bit (scc, n);
1427
 
1428
          while (VEC_length (unsigned, si->scc_stack) != 0
1429
                 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
1430
            {
1431
              unsigned int w = VEC_pop (unsigned, si->scc_stack);
1432
 
1433
              bitmap_set_bit (scc, w);
1434
            }
1435
 
1436
          lowest_node = bitmap_first_set_bit (scc);
1437
          gcc_assert (lowest_node < FIRST_REF_NODE);
1438
 
1439
          /* Collapse the SCC nodes into a single node, and mark the
1440
             indirect cycles.  */
1441
          EXECUTE_IF_SET_IN_BITMAP (scc, 0, i, bi)
1442
            {
1443
              if (i < FIRST_REF_NODE)
1444
                {
1445
                  if (unite (lowest_node, i))
1446
                    unify_nodes (graph, lowest_node, i, false);
1447
                }
1448
              else
1449
                {
1450
                  unite (lowest_node, i);
1451
                  graph->indirect_cycles[i - FIRST_REF_NODE] = lowest_node;
1452
                }
1453
            }
1454
        }
1455
      SET_BIT (si->deleted, n);
1456
    }
1457
  else
1458
    VEC_safe_push (unsigned, heap, si->scc_stack, n);
1459
}
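
/* For example (illustrative only): the copy constraints a = b and b = a
   give the edges a -> b and b -> a, so scc_visit finds the cycle {a, b}
   and unites both members onto the one with the lowest id; from then on
   a single node (and a single solution bitmap) stands for both.  When a
   cycle member is a REF node (a dereference), the chosen representative
   is additionally remembered in indirect_cycles so the cycle can be
   dealt with during solving.  */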
 
1461
/* Unify node FROM into node TO, updating the changed count if
1462
   necessary when UPDATE_CHANGED is true.  */
1463
 
1464
static void
1465
unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from,
1466
             bool update_changed)
1467
{
1468
 
1469
  gcc_assert (to != from && find (to) == to);
1470
  if (dump_file && (dump_flags & TDF_DETAILS))
1471
    fprintf (dump_file, "Unifying %s to %s\n",
1472
             get_varinfo (from)->name,
1473
             get_varinfo (to)->name);
1474
 
1475
  if (update_changed)
1476
    stats.unified_vars_dynamic++;
1477
  else
1478
    stats.unified_vars_static++;
1479
 
1480
  merge_graph_nodes (graph, to, from);
1481
  merge_node_constraints (graph, to, from);
1482
 
1483
  /* Mark TO as changed if FROM was changed. If TO was already marked
1484
     as changed, decrease the changed count.  */
1485
 
1486
  if (update_changed
1487
      && bitmap_bit_p (changed, from))
1488
    {
1489
      bitmap_clear_bit (changed, from);
1490
      bitmap_set_bit (changed, to);
1491
    }
1492
  if (get_varinfo (from)->solution)
1493
    {
1494
      /* If the solution changes because of the merging, we need to mark
1495
         the variable as changed.  */
1496
      if (bitmap_ior_into (get_varinfo (to)->solution,
1497
                           get_varinfo (from)->solution))
1498
        {
1499
          if (update_changed)
1500
            bitmap_set_bit (changed, to);
1501
        }
1502
 
1503
      BITMAP_FREE (get_varinfo (from)->solution);
1504
      if (get_varinfo (from)->oldsolution)
1505
        BITMAP_FREE (get_varinfo (from)->oldsolution);
1506
 
1507
      if (stats.iterations > 0
1508
          && get_varinfo (to)->oldsolution)
1509
        BITMAP_FREE (get_varinfo (to)->oldsolution);
1510
    }
1511
  if (valid_graph_edge (graph, to, to))
1512
    {
1513
      if (graph->succs[to])
1514
        bitmap_clear_bit (graph->succs[to], to);
1515
    }
1516
}
1517
 
1518
/* Information needed to compute the topological ordering of a graph.  */
1519
 
1520
struct topo_info
1521
{
1522
  /* sbitmap of visited nodes.  */
1523
  sbitmap visited;
1524
  /* Array that stores the topological order of the graph, *in
1525
     reverse*.  */
1526
  VEC(unsigned,heap) *topo_order;
1527
};
1528
 
1529
 
1530
/* Initialize and return a topological info structure.  */
1531
 
1532
static struct topo_info *
1533
init_topo_info (void)
1534
{
1535
  size_t size = graph->size;
1536
  struct topo_info *ti = XNEW (struct topo_info);
1537
  ti->visited = sbitmap_alloc (size);
1538
  sbitmap_zero (ti->visited);
1539
  ti->topo_order = VEC_alloc (unsigned, heap, 1);
1540
  return ti;
1541
}
1542
 
1543
 
1544
/* Free the topological sort info pointed to by TI.  */
1545
 
1546
static void
1547
free_topo_info (struct topo_info *ti)
1548
{
1549
  sbitmap_free (ti->visited);
1550
  VEC_free (unsigned, heap, ti->topo_order);
1551
  free (ti);
1552
}
1553
 
1554
/* Visit the graph in topological order, and store the order in the
1555
   topo_info structure.  */
1556
 
1557
static void
1558
topo_visit (constraint_graph_t graph, struct topo_info *ti,
1559
            unsigned int n)
1560
{
1561
  bitmap_iterator bi;
1562
  unsigned int j;
1563
 
1564
  SET_BIT (ti->visited, n);
1565
 
1566
  if (graph->succs[n])
1567
    EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi)
1568
      {
1569
        if (!TEST_BIT (ti->visited, j))
1570
          topo_visit (graph, ti, j);
1571
      }
1572
 
1573
  VEC_safe_push (unsigned, heap, ti->topo_order, n);
1574
}
1575
 
1576
/* Process a constraint C that represents x = *(y + off), using DELTA as the
1577
   starting solution for y.  */
1578
 
1579
static void
1580
do_sd_constraint (constraint_graph_t graph, constraint_t c,
1581
                  bitmap delta)
1582
{
1583
  unsigned int lhs = c->lhs.var;
1584
  bool flag = false;
1585
  bitmap sol = get_varinfo (lhs)->solution;
1586
  unsigned int j;
1587
  bitmap_iterator bi;
1588
  HOST_WIDE_INT roffset = c->rhs.offset;
1589
 
1590
  /* Our IL does not allow this.  */
1591
  gcc_assert (c->lhs.offset == 0);
1592
 
1593
  /* If the solution of Y contains anything it is good enough to transfer
1594
     this to the LHS.  */
1595
  if (bitmap_bit_p (delta, anything_id))
1596
    {
1597
      flag |= bitmap_set_bit (sol, anything_id);
1598
      goto done;
1599
    }
1600
 
1601
  /* If we do not know at which offset the rhs is dereferenced, compute
     the reachability set of DELTA, conservatively assuming it is
     dereferenced at all valid offsets.  */
1604
  if (roffset == UNKNOWN_OFFSET)
1605
    {
1606
      solution_set_expand (delta, delta);
1607
      /* No further offset processing is necessary.  */
1608
      roffset = 0;
1609
    }
1610
 
1611
  /* For each variable j in delta (Sol(y)), add
1612
     an edge in the graph from j to x, and union Sol(j) into Sol(x).  */
1613
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1614
    {
1615
      varinfo_t v = get_varinfo (j);
1616
      HOST_WIDE_INT fieldoffset = v->offset + roffset;
1617
      unsigned int t;
1618
 
1619
      if (v->is_full_var)
1620
        fieldoffset = v->offset;
1621
      else if (roffset != 0)
1622
        v = first_vi_for_offset (v, fieldoffset);
1623
      /* If the access is outside of the variable we can ignore it.  */
1624
      if (!v)
1625
        continue;
1626
 
1627
      do
1628
        {
1629
          t = find (v->id);
1630
 
1631
          /* Adding edges from the special vars is pointless.
1632
             They don't have sets that can change.  */
1633
          if (get_varinfo (t)->is_special_var)
1634
            flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1635
          /* Merging the solution from ESCAPED needlessly increases
1636
             the set.  Use ESCAPED as representative instead.  */
1637
          else if (v->id == escaped_id)
1638
            flag |= bitmap_set_bit (sol, escaped_id);
1639
          else if (v->may_have_pointers
1640
                   && add_graph_edge (graph, lhs, t))
1641
            flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1642
 
1643
          /* If the variable is not exactly at the requested offset
1644
             we have to include the next one.  */
1645
          if (v->offset == (unsigned HOST_WIDE_INT)fieldoffset
1646
              || v->next == NULL)
1647
            break;
1648
 
1649
          v = v->next;
1650
          fieldoffset = v->offset;
1651
        }
1652
      while (1);
1653
    }
1654
 
1655
done:
1656
  /* If the LHS solution changed, mark the var as changed.  */
1657
  if (flag)
1658
    {
1659
      get_varinfo (lhs)->solution = sol;
1660
      bitmap_set_bit (changed, lhs);
1661
    }
1662
}
1663
 
1664
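/* For illustration only, a hypothetical source fragment that gives rise
   to the x = *y form handled by do_sd_constraint above:

     int i, *p, **pp, *x;
     pp = &p;                 Sol(pp) = { p }
     p  = &i;                 Sol(p)  = { i }
     x  = *pp;                for each j in Sol(pp), here only p, an edge
                              p -> x is added and Sol(p) = { i } is
                              unioned into Sol(x)

   When the dereference offset is not known (x = *(pp + n) with a
   non-constant n) the reachability set of Sol(pp) is expanded first, as
   done by solution_set_expand above.  */
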
/* Process a constraint C that represents *(x + off) = y using DELTA
1665
   as the starting solution for x.  */
1666
 
1667
static void
1668
do_ds_constraint (constraint_t c, bitmap delta)
1669
{
1670
  unsigned int rhs = c->rhs.var;
1671
  bitmap sol = get_varinfo (rhs)->solution;
1672
  unsigned int j;
1673
  bitmap_iterator bi;
1674
  HOST_WIDE_INT loff = c->lhs.offset;
1675
  bool escaped_p = false;
1676
 
1677
  /* Our IL does not allow this.  */
1678
  gcc_assert (c->rhs.offset == 0);
1679
 
1680
  /* If the solution of y contains ANYTHING simply use the ANYTHING
1681
     solution.  This avoids needlessly increasing the points-to sets.  */
1682
  if (bitmap_bit_p (sol, anything_id))
1683
    sol = get_varinfo (find (anything_id))->solution;
1684
 
1685
  /* If the solution for x contains ANYTHING we have to merge the
1686
     solution of y into all pointer variables which we do via
1687
     STOREDANYTHING.  */
1688
  if (bitmap_bit_p (delta, anything_id))
1689
    {
1690
      unsigned t = find (storedanything_id);
1691
      if (add_graph_edge (graph, t, rhs))
1692
        {
1693
          if (bitmap_ior_into (get_varinfo (t)->solution, sol))
1694
            bitmap_set_bit (changed, t);
1695
        }
1696
      return;
1697
    }
1698
 
1699
  /* If we do not know at which offset the lhs is dereferenced compute
1700
     the reachability set of DELTA, conservatively assuming it is
1701
     dereferenced at all valid offsets.  */
1702
  if (loff == UNKNOWN_OFFSET)
1703
    {
1704
      solution_set_expand (delta, delta);
1705
      loff = 0;
1706
    }
1707
 
1708
  /* For each member j of delta (Sol(x)), add an edge from y to j and
1709
     union Sol(y) into Sol(j) */
1710
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1711
    {
1712
      varinfo_t v = get_varinfo (j);
1713
      unsigned int t;
1714
      HOST_WIDE_INT fieldoffset = v->offset + loff;
1715
 
1716
      if (v->is_full_var)
1717
        fieldoffset = v->offset;
1718
      else if (loff != 0)
1719
        v = first_vi_for_offset (v, fieldoffset);
1720
      /* If the access is outside of the variable we can ignore it.  */
1721
      if (!v)
1722
        continue;
1723
 
1724
      do
1725
        {
1726
          if (v->may_have_pointers)
1727
            {
1728
              /* If v is a global variable then this is an escape point.  */
1729
              if (v->is_global_var
1730
                  && !escaped_p)
1731
                {
1732
                  t = find (escaped_id);
1733
                  if (add_graph_edge (graph, t, rhs)
1734
                      && bitmap_ior_into (get_varinfo (t)->solution, sol))
1735
                    bitmap_set_bit (changed, t);
1736
                  /* Enough to let rhs escape once.  */
1737
                  escaped_p = true;
1738
                }
1739
 
1740
              if (v->is_special_var)
1741
                break;
1742
 
1743
              t = find (v->id);
1744
              if (add_graph_edge (graph, t, rhs)
1745
                  && bitmap_ior_into (get_varinfo (t)->solution, sol))
1746
                bitmap_set_bit (changed, t);
1747
            }
1748
 
1749
          /* If the variable is not exactly at the requested offset
1750
             we have to include the next one.  */
1751
          if (v->offset == (unsigned HOST_WIDE_INT)fieldoffset
1752
              || v->next == NULL)
1753
            break;
1754
 
1755
          v = v->next;
1756
          fieldoffset = v->offset;
1757
        }
1758
      while (1);
1759
    }
1760
}
1761
 
1762
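/* For illustration only, a hypothetical source fragment for the
   *x = y form handled by do_ds_constraint above:

     int i, *q, **pp;
     q   = &i;                Sol(q)  = { i }
     *pp = q;                 for each j in Sol(pp) an edge q -> j is
                              added and Sol(q) = { i } is unioned into
                              Sol(j)

   If some j is a global variable the store is also an escape point and
   Sol(q) is additionally merged into the ESCAPED solution, as the code
   above does once per constraint.  */
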
/* Handle a non-simple (simple meaning requires no iteration)
1763
   constraint (i.e. *x = &y, x = *y, *x = y, and x = y with offsets involved).  */
1764
 
1765
static void
1766
do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta)
1767
{
1768
  if (c->lhs.type == DEREF)
1769
    {
1770
      if (c->rhs.type == ADDRESSOF)
1771
        {
1772
          gcc_unreachable();
1773
        }
1774
      else
1775
        {
1776
          /* *x = y */
1777
          do_ds_constraint (c, delta);
1778
        }
1779
    }
1780
  else if (c->rhs.type == DEREF)
1781
    {
1782
      /* x = *y */
1783
      if (!(get_varinfo (c->lhs.var)->is_special_var))
1784
        do_sd_constraint (graph, c, delta);
1785
    }
1786
  else
1787
    {
1788
      bitmap tmp;
1789
      bitmap solution;
1790
      bool flag = false;
1791
 
1792
      gcc_assert (c->rhs.type == SCALAR && c->lhs.type == SCALAR);
1793
      solution = get_varinfo (c->rhs.var)->solution;
1794
      tmp = get_varinfo (c->lhs.var)->solution;
1795
 
1796
      flag = set_union_with_increment (tmp, solution, c->rhs.offset);
1797
 
1798
      if (flag)
1799
        {
1800
          get_varinfo (c->lhs.var)->solution = tmp;
1801
          bitmap_set_bit (changed, c->lhs.var);
1802
        }
1803
    }
1804
}
1805
 
1806
/* Initialize and return a new SCC info structure.  */
1807
 
1808
static struct scc_info *
1809
init_scc_info (size_t size)
1810
{
1811
  struct scc_info *si = XNEW (struct scc_info);
1812
  size_t i;
1813
 
1814
  si->current_index = 0;
1815
  si->visited = sbitmap_alloc (size);
1816
  sbitmap_zero (si->visited);
1817
  si->deleted = sbitmap_alloc (size);
1818
  sbitmap_zero (si->deleted);
1819
  si->node_mapping = XNEWVEC (unsigned int, size);
1820
  si->dfs = XCNEWVEC (unsigned int, size);
1821
 
1822
  for (i = 0; i < size; i++)
1823
    si->node_mapping[i] = i;
1824
 
1825
  si->scc_stack = VEC_alloc (unsigned, heap, 1);
1826
  return si;
1827
}
1828
 
1829
/* Free an SCC info structure pointed to by SI */
1830
 
1831
static void
1832
free_scc_info (struct scc_info *si)
1833
{
1834
  sbitmap_free (si->visited);
1835
  sbitmap_free (si->deleted);
1836
  free (si->node_mapping);
1837
  free (si->dfs);
1838
  VEC_free (unsigned, heap, si->scc_stack);
1839
  free (si);
1840
}
1841
 
1842
 
1843
/* Find the indirect cycles that occur in GRAPH, using strongly connected
1844
   components, and note them in the indirect cycles map.
1845
 
1846
   This technique comes from Ben Hardekopf and Calvin Lin,
1847
   "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
1848
   Lines of Code", submitted to PLDI 2007.  */
1849
 
1850
static void
1851
find_indirect_cycles (constraint_graph_t graph)
1852
{
1853
  unsigned int i;
1854
  unsigned int size = graph->size;
1855
  struct scc_info *si = init_scc_info (size);
1856
 
1857
  for (i = 0; i < MIN (LAST_REF_NODE, size); i ++ )
1858
    if (!TEST_BIT (si->visited, i) && find (i) == i)
1859
      scc_visit (graph, si, i);
1860
 
1861
  free_scc_info (si);
1862
}
1863
 
1864
/* Compute a topological ordering for GRAPH, and store the result in the
1865
   topo_info structure TI.  */
1866
 
1867
static void
1868
compute_topo_order (constraint_graph_t graph,
1869
                    struct topo_info *ti)
1870
{
1871
  unsigned int i;
1872
  unsigned int size = graph->size;
1873
 
1874
  for (i = 0; i != size; ++i)
1875
    if (!TEST_BIT (ti->visited, i) && find (i) == i)
1876
      topo_visit (graph, ti, i);
1877
}
1878
 
1879
/* Structure used for hash value numbering of pointer equivalence
1880
   classes.  */
1881
 
1882
typedef struct equiv_class_label
1883
{
1884
  hashval_t hashcode;
1885
  unsigned int equivalence_class;
1886
  bitmap labels;
1887
} *equiv_class_label_t;
1888
typedef const struct equiv_class_label *const_equiv_class_label_t;
1889
 
1890
/* A hashtable for mapping a bitmap of labels->pointer equivalence
1891
   classes.  */
1892
static htab_t pointer_equiv_class_table;
1893
 
1894
/* A hashtable for mapping a bitmap of labels->location equivalence
1895
   classes.  */
1896
static htab_t location_equiv_class_table;
1897
 
1898
/* Hash function for an equiv_class_label_t.  */
1899
 
1900
static hashval_t
1901
equiv_class_label_hash (const void *p)
1902
{
1903
  const_equiv_class_label_t const ecl = (const_equiv_class_label_t) p;
1904
  return ecl->hashcode;
1905
}
1906
 
1907
/* Equality function for two equiv_class_label_t's.  */
1908
 
1909
static int
1910
equiv_class_label_eq (const void *p1, const void *p2)
1911
{
1912
  const_equiv_class_label_t const eql1 = (const_equiv_class_label_t) p1;
1913
  const_equiv_class_label_t const eql2 = (const_equiv_class_label_t) p2;
1914
  return (eql1->hashcode == eql2->hashcode
1915
          && bitmap_equal_p (eql1->labels, eql2->labels));
1916
}
1917
 
1918
/* Look up an equivalence class in TABLE by the bitmap of LABELS it
1919
   contains.  */
1920
 
1921
static unsigned int
1922
equiv_class_lookup (htab_t table, bitmap labels)
1923
{
1924
  void **slot;
1925
  struct equiv_class_label ecl;
1926
 
1927
  ecl.labels = labels;
1928
  ecl.hashcode = bitmap_hash (labels);
1929
 
1930
  slot = htab_find_slot_with_hash (table, &ecl,
1931
                                   ecl.hashcode, NO_INSERT);
1932
  if (!slot)
1933
    return 0;
1934
  else
1935
    return ((equiv_class_label_t) *slot)->equivalence_class;
1936
}
1937
 
1938
 
1939
/* Add an equivalence class named EQUIVALENCE_CLASS with labels LABELS
1940
   to TABLE.  */
1941
 
1942
static void
1943
equiv_class_add (htab_t table, unsigned int equivalence_class,
1944
                 bitmap labels)
1945
{
1946
  void **slot;
1947
  equiv_class_label_t ecl = XNEW (struct equiv_class_label);
1948
 
1949
  ecl->labels = labels;
1950
  ecl->equivalence_class = equivalence_class;
1951
  ecl->hashcode = bitmap_hash (labels);
1952
 
1953
  slot = htab_find_slot_with_hash (table, ecl,
1954
                                   ecl->hashcode, INSERT);
1955
  gcc_assert (!*slot);
1956
  *slot = (void *) ecl;
1957
}
1958
 
1959
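/* The two helpers above are meant to be used as a lookup-or-insert pair.
   A minimal sketch of that usage (NEXT_FREE_CLASS stands for whichever
   counter the caller owns, e.g. pointer_equiv_class below):

     label = equiv_class_lookup (table, labels);
     if (label == 0)
       {
         label = NEXT_FREE_CLASS++;
         equiv_class_add (table, label, labels);
       }

   label_visit and the location-equivalence code in
   perform_var_substitution follow exactly this pattern.  */
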
/* Perform offline variable substitution.
1960
 
1961
   This is a worst case quadratic time way of identifying variables
1962
   that must have equivalent points-to sets, including those caused by
1963
   static cycles and single-entry subgraphs in the constraint graph.
1964
 
1965
   The technique is described in "Exploiting Pointer and Location
1966
   Equivalence to Optimize Pointer Analysis", in the 14th International
1967
   Static Analysis Symposium (SAS), August 2007.  It is known as the
1968
   "HU" algorithm, and is equivalent to value numbering the collapsed
1969
   constraint graph including evaluating unions.
1970
 
1971
   The general method of finding equivalence classes is as follows:
1972
   Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
1973
   Initialize all non-REF nodes to be direct nodes.
1974
   For each constraint a = a U {b}, we set pts(a) = pts(a) U {fresh
1975
   variable}
1976
   For each constraint containing the dereference, we also do the same
1977
   thing.
1978
 
1979
   We then compute SCC's in the graph and unify nodes in the same SCC,
1980
   including pts sets.
1981
 
1982
   For each non-collapsed node x:
1983
    Visit all unvisited explicit incoming edges.
1984
    Ignoring all non-pointers, set pts(x) = Union of pts(y) for all y
1985
    where y->x.
1986
    Lookup the equivalence class for pts(x).
1987
     If we found one, equivalence_class(x) = found class.
1988
     Otherwise, equivalence_class(x) = new class, and new_class is
1989
    added to the lookup table.
1990
 
1991
   All direct nodes with the same equivalence class can be replaced
1992
   with a single representative node.
1993
   All unlabeled nodes (label == 0) are not pointers and all edges
1994
   involving them can be eliminated.
1995
   We perform these optimizations during rewrite_constraints.
1996
 
1997
   In addition to pointer equivalence class finding, we also perform
1998
   location equivalence class finding.  This is the set of variables
1999
   that always appear together in points-to sets.  We use this to
2000
   compress the size of the points-to sets.  */
2001
 
2002
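/* A small worked example of the labeling described above (hypothetical
   variables, for illustration only):

     p = &a;   q = &a;   r = p;

   After condensation pts(p) = pts(q) = { a }, so p and q receive the
   same pointer equivalence label and one of them can represent both;
   r only copies from p, ends up with the same label set and joins the
   class as well.  A variable whose label stays 0 cannot be a pointer,
   and rewrite_constraints drops every constraint that mentions it.  */
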
/* Current maximum pointer equivalence class id.  */
2003
static int pointer_equiv_class;
2004
 
2005
/* Current maximum location equivalence class id.  */
2006
static int location_equiv_class;
2007
 
2008
/* Recursive routine to find strongly connected components in GRAPH,
2009
   and label its nodes with DFS numbers.  */
2010
 
2011
static void
2012
condense_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
2013
{
2014
  unsigned int i;
2015
  bitmap_iterator bi;
2016
  unsigned int my_dfs;
2017
 
2018
  gcc_assert (si->node_mapping[n] == n);
2019
  SET_BIT (si->visited, n);
2020
  si->dfs[n] = si->current_index ++;
2021
  my_dfs = si->dfs[n];
2022
 
2023
  /* Visit all the successors.  */
2024
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
2025
    {
2026
      unsigned int w = si->node_mapping[i];
2027
 
2028
      if (TEST_BIT (si->deleted, w))
2029
        continue;
2030
 
2031
      if (!TEST_BIT (si->visited, w))
2032
        condense_visit (graph, si, w);
2033
      {
2034
        unsigned int t = si->node_mapping[w];
2035
        unsigned int nnode = si->node_mapping[n];
2036
        gcc_assert (nnode == n);
2037
 
2038
        if (si->dfs[t] < si->dfs[nnode])
2039
          si->dfs[n] = si->dfs[t];
2040
      }
2041
    }
2042
 
2043
  /* Visit all the implicit predecessors.  */
2044
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->implicit_preds[n], 0, i, bi)
2045
    {
2046
      unsigned int w = si->node_mapping[i];
2047
 
2048
      if (TEST_BIT (si->deleted, w))
2049
        continue;
2050
 
2051
      if (!TEST_BIT (si->visited, w))
2052
        condense_visit (graph, si, w);
2053
      {
2054
        unsigned int t = si->node_mapping[w];
2055
        unsigned int nnode = si->node_mapping[n];
2056
        gcc_assert (nnode == n);
2057
 
2058
        if (si->dfs[t] < si->dfs[nnode])
2059
          si->dfs[n] = si->dfs[t];
2060
      }
2061
    }
2062
 
2063
  /* See if any components have been identified.  */
2064
  if (si->dfs[n] == my_dfs)
2065
    {
2066
      while (VEC_length (unsigned, si->scc_stack) != 0
2067
             && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
2068
        {
2069
          unsigned int w = VEC_pop (unsigned, si->scc_stack);
2070
          si->node_mapping[w] = n;
2071
 
2072
          if (!TEST_BIT (graph->direct_nodes, w))
2073
            RESET_BIT (graph->direct_nodes, n);
2074
 
2075
          /* Unify our nodes.  */
2076
          if (graph->preds[w])
2077
            {
2078
              if (!graph->preds[n])
2079
                graph->preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
2080
              bitmap_ior_into (graph->preds[n], graph->preds[w]);
2081
            }
2082
          if (graph->implicit_preds[w])
2083
            {
2084
              if (!graph->implicit_preds[n])
2085
                graph->implicit_preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
2086
              bitmap_ior_into (graph->implicit_preds[n],
2087
                               graph->implicit_preds[w]);
2088
            }
2089
          if (graph->points_to[w])
2090
            {
2091
              if (!graph->points_to[n])
2092
                graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
2093
              bitmap_ior_into (graph->points_to[n],
2094
                               graph->points_to[w]);
2095
            }
2096
        }
2097
      SET_BIT (si->deleted, n);
2098
    }
2099
  else
2100
    VEC_safe_push (unsigned, heap, si->scc_stack, n);
2101
}
2102
 
2103
/* Label pointer equivalences.  */
2104
 
2105
static void
2106
label_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
2107
{
2108
  unsigned int i;
2109
  bitmap_iterator bi;
2110
  SET_BIT (si->visited, n);
2111
 
2112
  if (!graph->points_to[n])
2113
    graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
2114
 
2115
  /* Label and union our incoming edges' points-to sets.  */
2116
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
2117
    {
2118
      unsigned int w = si->node_mapping[i];
2119
      if (!TEST_BIT (si->visited, w))
2120
        label_visit (graph, si, w);
2121
 
2122
      /* Skip unused edges  */
2123
      if (w == n || graph->pointer_label[w] == 0)
2124
        continue;
2125
 
2126
      if (graph->points_to[w])
2127
        bitmap_ior_into(graph->points_to[n], graph->points_to[w]);
2128
    }
2129
  /* Indirect nodes get fresh variables.  */
2130
  if (!TEST_BIT (graph->direct_nodes, n))
2131
    bitmap_set_bit (graph->points_to[n], FIRST_REF_NODE + n);
2132
 
2133
  if (!bitmap_empty_p (graph->points_to[n]))
2134
    {
2135
      unsigned int label = equiv_class_lookup (pointer_equiv_class_table,
2136
                                               graph->points_to[n]);
2137
      if (!label)
2138
        {
2139
          label = pointer_equiv_class++;
2140
          equiv_class_add (pointer_equiv_class_table,
2141
                           label, graph->points_to[n]);
2142
        }
2143
      graph->pointer_label[n] = label;
2144
    }
2145
}
2146
 
2147
/* Perform offline variable substitution, discovering equivalence
2148
   classes, and eliminating non-pointer variables.  */
2149
 
2150
static struct scc_info *
2151
perform_var_substitution (constraint_graph_t graph)
2152
{
2153
  unsigned int i;
2154
  unsigned int size = graph->size;
2155
  struct scc_info *si = init_scc_info (size);
2156
 
2157
  bitmap_obstack_initialize (&iteration_obstack);
2158
  pointer_equiv_class_table = htab_create (511, equiv_class_label_hash,
2159
                                           equiv_class_label_eq, free);
2160
  location_equiv_class_table = htab_create (511, equiv_class_label_hash,
2161
                                            equiv_class_label_eq, free);
2162
  pointer_equiv_class = 1;
2163
  location_equiv_class = 1;
2164
 
2165
  /* Condense the nodes, which means to find SCC's, count incoming
2166
     predecessors, and unite nodes in SCC's.  */
2167
  for (i = 0; i < FIRST_REF_NODE; i++)
2168
    if (!TEST_BIT (si->visited, si->node_mapping[i]))
2169
      condense_visit (graph, si, si->node_mapping[i]);
2170
 
2171
  sbitmap_zero (si->visited);
2172
  /* Actually label the nodes for pointer equivalences.  */
2173
  for (i = 0; i < FIRST_REF_NODE; i++)
2174
    if (!TEST_BIT (si->visited, si->node_mapping[i]))
2175
      label_visit (graph, si, si->node_mapping[i]);
2176
 
2177
  /* Calculate location equivalence labels.  */
2178
  for (i = 0; i < FIRST_REF_NODE; i++)
2179
    {
2180
      bitmap pointed_by;
2181
      bitmap_iterator bi;
2182
      unsigned int j;
2183
      unsigned int label;
2184
 
2185
      if (!graph->pointed_by[i])
2186
        continue;
2187
      pointed_by = BITMAP_ALLOC (&iteration_obstack);
2188
 
2189
      /* Translate the pointed-by mapping for pointer equivalence
2190
         labels.  */
2191
      EXECUTE_IF_SET_IN_BITMAP (graph->pointed_by[i], 0, j, bi)
2192
        {
2193
          bitmap_set_bit (pointed_by,
2194
                          graph->pointer_label[si->node_mapping[j]]);
2195
        }
2196
      /* The original pointed_by is now dead.  */
2197
      BITMAP_FREE (graph->pointed_by[i]);
2198
 
2199
      /* Look up the location equivalence label if one exists, or make
2200
         one otherwise.  */
2201
      label = equiv_class_lookup (location_equiv_class_table,
2202
                                  pointed_by);
2203
      if (label == 0)
2204
        {
2205
          label = location_equiv_class++;
2206
          equiv_class_add (location_equiv_class_table,
2207
                           label, pointed_by);
2208
        }
2209
      else
2210
        {
2211
          if (dump_file && (dump_flags & TDF_DETAILS))
2212
            fprintf (dump_file, "Found location equivalence for node %s\n",
2213
                     get_varinfo (i)->name);
2214
          BITMAP_FREE (pointed_by);
2215
        }
2216
      graph->loc_label[i] = label;
2217
 
2218
    }
2219
 
2220
  if (dump_file && (dump_flags & TDF_DETAILS))
2221
    for (i = 0; i < FIRST_REF_NODE; i++)
2222
      {
2223
        bool direct_node = TEST_BIT (graph->direct_nodes, i);
2224
        fprintf (dump_file,
2225
                 "Equivalence classes for %s node id %d:%s are pointer: %d"
2226
                 ", location:%d\n",
2227
                 direct_node ? "Direct node" : "Indirect node", i,
2228
                 get_varinfo (i)->name,
2229
                 graph->pointer_label[si->node_mapping[i]],
2230
                 graph->loc_label[si->node_mapping[i]]);
2231
      }
2232
 
2233
  /* Quickly eliminate our non-pointer variables.  */
2234
 
2235
  for (i = 0; i < FIRST_REF_NODE; i++)
2236
    {
2237
      unsigned int node = si->node_mapping[i];
2238
 
2239
      if (graph->pointer_label[node] == 0)
2240
        {
2241
          if (dump_file && (dump_flags & TDF_DETAILS))
2242
            fprintf (dump_file,
2243
                     "%s is a non-pointer variable, eliminating edges.\n",
2244
                     get_varinfo (node)->name);
2245
          stats.nonpointer_vars++;
2246
          clear_edges_for_node (graph, node);
2247
        }
2248
    }
2249
 
2250
  return si;
2251
}
2252
 
2253
/* Free information that was only necessary for variable
2254
   substitution.  */
2255
 
2256
static void
2257
free_var_substitution_info (struct scc_info *si)
2258
{
2259
  free_scc_info (si);
2260
  free (graph->pointer_label);
2261
  free (graph->loc_label);
2262
  free (graph->pointed_by);
2263
  free (graph->points_to);
2264
  free (graph->eq_rep);
2265
  sbitmap_free (graph->direct_nodes);
2266
  htab_delete (pointer_equiv_class_table);
2267
  htab_delete (location_equiv_class_table);
2268
  bitmap_obstack_release (&iteration_obstack);
2269
}
2270
 
2271
/* Return an existing node that is equivalent to NODE, which has
2272
   equivalence class LABEL, if one exists.  Return NODE otherwise.  */
2273
 
2274
static unsigned int
2275
find_equivalent_node (constraint_graph_t graph,
2276
                      unsigned int node, unsigned int label)
2277
{
2278
  /* If the address version of this variable is unused, we can
2279
     substitute it for anything else with the same label.
2280
     Otherwise, we know the pointers are equivalent, but not the
2281
     locations, and we can unite them later.  */
2282
 
2283
  if (!bitmap_bit_p (graph->address_taken, node))
2284
    {
2285
      gcc_assert (label < graph->size);
2286
 
2287
      if (graph->eq_rep[label] != -1)
2288
        {
2289
          /* Unify the two variables since we know they are equivalent.  */
2290
          if (unite (graph->eq_rep[label], node))
2291
            unify_nodes (graph, graph->eq_rep[label], node, false);
2292
          return graph->eq_rep[label];
2293
        }
2294
      else
2295
        {
2296
          graph->eq_rep[label] = node;
2297
          graph->pe_rep[label] = node;
2298
        }
2299
    }
2300
  else
2301
    {
2302
      gcc_assert (label < graph->size);
2303
      graph->pe[node] = label;
2304
      if (graph->pe_rep[label] == -1)
2305
        graph->pe_rep[label] = node;
2306
    }
2307
 
2308
  return node;
2309
}
2310
 
2311
/* Unite pointer equivalent but not location equivalent nodes in
2312
   GRAPH.  This may only be performed once variable substitution is
2313
   finished.  */
2314
 
2315
static void
2316
unite_pointer_equivalences (constraint_graph_t graph)
2317
{
2318
  unsigned int i;
2319
 
2320
  /* Go through the pointer equivalences and unite them to their
2321
     representative, if they aren't already.  */
2322
  for (i = 0; i < FIRST_REF_NODE; i++)
2323
    {
2324
      unsigned int label = graph->pe[i];
2325
      if (label)
2326
        {
2327
          int label_rep = graph->pe_rep[label];
2328
 
2329
          if (label_rep == -1)
2330
            continue;
2331
 
2332
          label_rep = find (label_rep);
2333
          if (label_rep >= 0 && unite (label_rep, find (i)))
2334
            unify_nodes (graph, label_rep, i, false);
2335
        }
2336
    }
2337
}
2338
 
2339
/* Move complex constraints to the GRAPH nodes they belong to.  */
2340
 
2341
static void
2342
move_complex_constraints (constraint_graph_t graph)
2343
{
2344
  int i;
2345
  constraint_t c;
2346
 
2347
  FOR_EACH_VEC_ELT (constraint_t, constraints, i, c)
2348
    {
2349
      if (c)
2350
        {
2351
          struct constraint_expr lhs = c->lhs;
2352
          struct constraint_expr rhs = c->rhs;
2353
 
2354
          if (lhs.type == DEREF)
2355
            {
2356
              insert_into_complex (graph, lhs.var, c);
2357
            }
2358
          else if (rhs.type == DEREF)
2359
            {
2360
              if (!(get_varinfo (lhs.var)->is_special_var))
2361
                insert_into_complex (graph, rhs.var, c);
2362
            }
2363
          else if (rhs.type != ADDRESSOF && lhs.var > anything_id
2364
                   && (lhs.offset != 0 || rhs.offset != 0))
2365
            {
2366
              insert_into_complex (graph, rhs.var, c);
2367
            }
2368
        }
2369
    }
2370
}
2371
 
2372
 
2373
/* Optimize and rewrite complex constraints while performing
2374
   collapsing of equivalent nodes.  SI is the SCC_INFO that is the
2375
   result of perform_var_substitution.  */
2376
 
2377
static void
2378
rewrite_constraints (constraint_graph_t graph,
2379
                     struct scc_info *si)
2380
{
2381
  int i;
2382
  unsigned int j;
2383
  constraint_t c;
2384
 
2385
  for (j = 0; j < graph->size; j++)
2386
    gcc_assert (find (j) == j);
2387
 
2388
  FOR_EACH_VEC_ELT (constraint_t, constraints, i, c)
2389
    {
2390
      struct constraint_expr lhs = c->lhs;
2391
      struct constraint_expr rhs = c->rhs;
2392
      unsigned int lhsvar = find (lhs.var);
2393
      unsigned int rhsvar = find (rhs.var);
2394
      unsigned int lhsnode, rhsnode;
2395
      unsigned int lhslabel, rhslabel;
2396
 
2397
      lhsnode = si->node_mapping[lhsvar];
2398
      rhsnode = si->node_mapping[rhsvar];
2399
      lhslabel = graph->pointer_label[lhsnode];
2400
      rhslabel = graph->pointer_label[rhsnode];
2401
 
2402
      /* See if it is really a non-pointer variable, and if so, ignore
2403
         the constraint.  */
2404
      if (lhslabel == 0)
2405
        {
2406
          if (dump_file && (dump_flags & TDF_DETAILS))
2407
            {
2408
 
2409
              fprintf (dump_file, "%s is a non-pointer variable,"
2410
                       "ignoring constraint:",
2411
                       get_varinfo (lhs.var)->name);
2412
              dump_constraint (dump_file, c);
2413
              fprintf (dump_file, "\n");
2414
            }
2415
          VEC_replace (constraint_t, constraints, i, NULL);
2416
          continue;
2417
        }
2418
 
2419
      if (rhslabel == 0)
2420
        {
2421
          if (dump_file && (dump_flags & TDF_DETAILS))
2422
            {
2423
 
2424
              fprintf (dump_file, "%s is a non-pointer variable,"
2425
                       "ignoring constraint:",
2426
                       get_varinfo (rhs.var)->name);
2427
              dump_constraint (dump_file, c);
2428
              fprintf (dump_file, "\n");
2429
            }
2430
          VEC_replace (constraint_t, constraints, i, NULL);
2431
          continue;
2432
        }
2433
 
2434
      lhsvar = find_equivalent_node (graph, lhsvar, lhslabel);
2435
      rhsvar = find_equivalent_node (graph, rhsvar, rhslabel);
2436
      c->lhs.var = lhsvar;
2437
      c->rhs.var = rhsvar;
2438
 
2439
    }
2440
}
2441
 
2442
/* Eliminate indirect cycles involving NODE.  Return true if NODE was
2443
   part of an SCC, false otherwise.  */
2444
 
2445
static bool
2446
eliminate_indirect_cycles (unsigned int node)
2447
{
2448
  if (graph->indirect_cycles[node] != -1
2449
      && !bitmap_empty_p (get_varinfo (node)->solution))
2450
    {
2451
      unsigned int i;
2452
      VEC(unsigned,heap) *queue = NULL;
2453
      int queuepos;
2454
      unsigned int to = find (graph->indirect_cycles[node]);
2455
      bitmap_iterator bi;
2456
 
2457
      /* We can't touch the solution set and call unify_nodes
2458
         at the same time, because unify_nodes is going to do
2459
         bitmap unions into it. */
2460
 
2461
      EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node)->solution, 0, i, bi)
2462
        {
2463
          if (find (i) == i && i != to)
2464
            {
2465
              if (unite (to, i))
2466
                VEC_safe_push (unsigned, heap, queue, i);
2467
            }
2468
        }
2469
 
2470
      for (queuepos = 0;
2471
           VEC_iterate (unsigned, queue, queuepos, i);
2472
           queuepos++)
2473
        {
2474
          unify_nodes (graph, to, i, true);
2475
        }
2476
      VEC_free (unsigned, heap, queue);
2477
      return true;
2478
    }
2479
  return false;
2480
}
2481
 
2482
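/* For illustration only, a rough example of an indirect cycle: a loop
   such as

     while (...)
       p = *p;

   produces the complex constraint p = *p, so every variable in Sol(p)
   gets an edge back into p and the cycle only becomes visible once
   solutions start to fill in.  find_indirect_cycles records a
   representative for such cycles, and eliminate_indirect_cycles above
   unites the members of the node's current solution with it;
   solve_graph below calls it for every node popped from the
   topological order.  */
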
/* Solve the constraint graph GRAPH using our worklist solver.
2483
   This is based on the PW* family of solvers from the "Efficient Field
2484
   Sensitive Pointer Analysis for C" paper.
2485
   It works by iterating over all the graph nodes, processing the complex
2486
   constraints and propagating the copy constraints, until everything stops
2487
   changing.  This corresponds to steps 6-8 in the solving list given above.  */
2488
 
2489
static void
2490
solve_graph (constraint_graph_t graph)
2491
{
2492
  unsigned int size = graph->size;
2493
  unsigned int i;
2494
  bitmap pts;
2495
 
2496
  changed = BITMAP_ALLOC (NULL);
2497
 
2498
  /* Mark all initial non-collapsed nodes as changed.  */
2499
  for (i = 0; i < size; i++)
2500
    {
2501
      varinfo_t ivi = get_varinfo (i);
2502
      if (find (i) == i && !bitmap_empty_p (ivi->solution)
2503
          && ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
2504
              || VEC_length (constraint_t, graph->complex[i]) > 0))
2505
        bitmap_set_bit (changed, i);
2506
    }
2507
 
2508
  /* Allocate a bitmap to be used to store the changed bits.  */
2509
  pts = BITMAP_ALLOC (&pta_obstack);
2510
 
2511
  while (!bitmap_empty_p (changed))
2512
    {
2513
      unsigned int i;
2514
      struct topo_info *ti = init_topo_info ();
2515
      stats.iterations++;
2516
 
2517
      bitmap_obstack_initialize (&iteration_obstack);
2518
 
2519
      compute_topo_order (graph, ti);
2520
 
2521
      while (VEC_length (unsigned, ti->topo_order) != 0)
2522
        {
2523
 
2524
          i = VEC_pop (unsigned, ti->topo_order);
2525
 
2526
          /* If this variable is not a representative, skip it.  */
2527
          if (find (i) != i)
2528
            continue;
2529
 
2530
          /* In certain indirect cycle cases, we may merge this
2531
             variable to another.  */
2532
          if (eliminate_indirect_cycles (i) && find (i) != i)
2533
            continue;
2534
 
2535
          /* If the node has changed, we need to process the
2536
             complex constraints and outgoing edges again.  */
2537
          if (bitmap_clear_bit (changed, i))
2538
            {
2539
              unsigned int j;
2540
              constraint_t c;
2541
              bitmap solution;
2542
              VEC(constraint_t,heap) *complex = graph->complex[i];
2543
              varinfo_t vi = get_varinfo (i);
2544
              bool solution_empty;
2545
 
2546
              /* Compute the changed set of solution bits.  */
2547
              if (vi->oldsolution)
2548
                bitmap_and_compl (pts, vi->solution, vi->oldsolution);
2549
              else
2550
                bitmap_copy (pts, vi->solution);
2551
 
2552
              if (bitmap_empty_p (pts))
2553
                continue;
2554
 
2555
              if (vi->oldsolution)
2556
                bitmap_ior_into (vi->oldsolution, pts);
2557
              else
2558
                {
2559
                  vi->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
2560
                  bitmap_copy (vi->oldsolution, pts);
2561
                }
2562
 
2563
              solution = vi->solution;
2564
              solution_empty = bitmap_empty_p (solution);
2565
 
2566
              /* Process the complex constraints */
2567
              FOR_EACH_VEC_ELT (constraint_t, complex, j, c)
2568
                {
2569
                  /* XXX: This is going to unsort the constraints in
2570
                     some cases, which will occasionally add duplicate
2571
                     constraints during unification.  This does not
2572
                     affect correctness.  */
2573
                  c->lhs.var = find (c->lhs.var);
2574
                  c->rhs.var = find (c->rhs.var);
2575
 
2576
                  /* The only complex constraint that can change our
2577
                     solution to non-empty, given an empty solution,
2578
                     is a constraint where the lhs side is receiving
2579
                     some set from elsewhere.  */
2580
                  if (!solution_empty || c->lhs.type != DEREF)
2581
                    do_complex_constraint (graph, c, pts);
2582
                }
2583
 
2584
              solution_empty = bitmap_empty_p (solution);
2585
 
2586
              if (!solution_empty)
2587
                {
2588
                  bitmap_iterator bi;
2589
                  unsigned eff_escaped_id = find (escaped_id);
2590
 
2591
                  /* Propagate solution to all successors.  */
2592
                  EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i],
2593
                                                0, j, bi)
2594
                    {
2595
                      bitmap tmp;
2596
                      bool flag;
2597
 
2598
                      unsigned int to = find (j);
2599
                      tmp = get_varinfo (to)->solution;
2600
                      flag = false;
2601
 
2602
                      /* Don't try to propagate to ourselves.  */
2603
                      if (to == i)
2604
                        continue;
2605
 
2606
                      /* If we propagate from ESCAPED use ESCAPED as
2607
                         placeholder.  */
2608
                      if (i == eff_escaped_id)
2609
                        flag = bitmap_set_bit (tmp, escaped_id);
2610
                      else
2611
                        flag = set_union_with_increment (tmp, pts, 0);
2612
 
2613
                      if (flag)
2614
                        {
2615
                          get_varinfo (to)->solution = tmp;
2616
                          bitmap_set_bit (changed, to);
2617
                        }
2618
                    }
2619
                }
2620
            }
2621
        }
2622
      free_topo_info (ti);
2623
      bitmap_obstack_release (&iteration_obstack);
2624
    }
2625
 
2626
  BITMAP_FREE (pts);
2627
  BITMAP_FREE (changed);
2628
  bitmap_obstack_release (&oldpta_obstack);
2629
}
2630
 
2631
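/* A compressed trace of the solver above on a hypothetical two-variable
   graph (for illustration only): with Sol(a) = { x }, a copy edge
   a -> b and Sol(b) empty, the initial pass marks a as changed.  The
   topological order pops a first, propagates pts = { x } across the
   edge and marks b as changed; b is popped later in the same sweep,
   has no complex constraints and no successors, so nothing new is
   produced and the outer while loop exits with Sol(b) = { x } as the
   fixpoint.  */
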
/* Map from trees to variable infos.  */
2632
static struct pointer_map_t *vi_for_tree;
2633
 
2634
 
2635
/* Insert VI as the variable info for tree T in the vi_for_tree map.  */
2636
 
2637
static void
2638
insert_vi_for_tree (tree t, varinfo_t vi)
2639
{
2640
  void **slot = pointer_map_insert (vi_for_tree, t);
2641
  gcc_assert (vi);
2642
  gcc_assert (*slot == NULL);
2643
  *slot = vi;
2644
}
2645
 
2646
/* Find the variable info for tree T in VI_FOR_TREE.  If T does not
2647
   exist in the map, return NULL; otherwise return the varinfo we found.  */
2648
 
2649
static varinfo_t
2650
lookup_vi_for_tree (tree t)
2651
{
2652
  void **slot = pointer_map_contains (vi_for_tree, t);
2653
  if (slot == NULL)
2654
    return NULL;
2655
 
2656
  return (varinfo_t) *slot;
2657
}
2658
 
2659
/* Return a printable name for DECL.  */
2660
 
2661
static const char *
2662
alias_get_name (tree decl)
2663
{
2664
  const char *res;
2665
  char *temp;
2666
  int num_printed = 0;
2667
 
2668
  if (DECL_ASSEMBLER_NAME_SET_P (decl))
2669
    res = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
2670
  else
2671
    res = get_name (decl);
2672
  if (res != NULL)
2673
    return res;
2674
 
2675
  res = "NULL";
2676
  if (!dump_file)
2677
    return res;
2678
 
2679
  if (TREE_CODE (decl) == SSA_NAME)
2680
    {
2681
      num_printed = asprintf (&temp, "%s_%u",
2682
                              alias_get_name (SSA_NAME_VAR (decl)),
2683
                              SSA_NAME_VERSION (decl));
2684
    }
2685
  else if (DECL_P (decl))
2686
    {
2687
      num_printed = asprintf (&temp, "D.%u", DECL_UID (decl));
2688
    }
2689
  if (num_printed > 0)
2690
    {
2691
      res = ggc_strdup (temp);
2692
      free (temp);
2693
    }
2694
  return res;
2695
}
2696
 
2697
/* Find the variable id for tree T in the map.
2698
   If T doesn't exist in the map, create an entry for it and return it.  */
2699
 
2700
static varinfo_t
2701
get_vi_for_tree (tree t)
2702
{
2703
  void **slot = pointer_map_contains (vi_for_tree, t);
2704
  if (slot == NULL)
2705
    return get_varinfo (create_variable_info_for (t, alias_get_name (t)));
2706
 
2707
  return (varinfo_t) *slot;
2708
}
2709
 
2710
/* Get a scalar constraint expression for a new temporary variable.  */
2711
 
2712
static struct constraint_expr
2713
new_scalar_tmp_constraint_exp (const char *name)
2714
{
2715
  struct constraint_expr tmp;
2716
  varinfo_t vi;
2717
 
2718
  vi = new_var_info (NULL_TREE, name);
2719
  vi->offset = 0;
2720
  vi->size = -1;
2721
  vi->fullsize = -1;
2722
  vi->is_full_var = 1;
2723
 
2724
  tmp.var = vi->id;
2725
  tmp.type = SCALAR;
2726
  tmp.offset = 0;
2727
 
2728
  return tmp;
2729
}
2730
 
2731
/* Get a constraint expression vector from an SSA_VAR_P node.
2732
   If address_p is true, the result will have its address taken.  */
2733
 
2734
static void
2735
get_constraint_for_ssa_var (tree t, VEC(ce_s, heap) **results, bool address_p)
2736
{
2737
  struct constraint_expr cexpr;
2738
  varinfo_t vi;
2739
 
2740
  /* We allow FUNCTION_DECLs here even though it doesn't make much sense.  */
2741
  gcc_assert (SSA_VAR_P (t) || DECL_P (t));
2742
 
2743
  /* For parameters, get at the points-to set for the actual parm
2744
     decl.  */
2745
  if (TREE_CODE (t) == SSA_NAME
2746
      && (TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
2747
          || TREE_CODE (SSA_NAME_VAR (t)) == RESULT_DECL)
2748
      && SSA_NAME_IS_DEFAULT_DEF (t))
2749
    {
2750
      get_constraint_for_ssa_var (SSA_NAME_VAR (t), results, address_p);
2751
      return;
2752
    }
2753
 
2754
  /* For global variables resort to the alias target.  */
2755
  if (TREE_CODE (t) == VAR_DECL
2756
      && (TREE_STATIC (t) || DECL_EXTERNAL (t)))
2757
    {
2758
      struct varpool_node *node = varpool_get_node (t);
2759
      if (node && node->alias)
2760
        {
2761
          node = varpool_variable_node (node, NULL);
2762
          t = node->decl;
2763
        }
2764
    }
2765
 
2766
  vi = get_vi_for_tree (t);
2767
  cexpr.var = vi->id;
2768
  cexpr.type = SCALAR;
2769
  cexpr.offset = 0;
2770
  /* If we determine the result is "anything", and we know this is readonly,
2771
     say it points to readonly memory instead.  */
2772
  if (cexpr.var == anything_id && TREE_READONLY (t))
2773
    {
2774
      gcc_unreachable ();
2775
      cexpr.type = ADDRESSOF;
2776
      cexpr.var = readonly_id;
2777
    }
2778
 
2779
  /* If we are not taking the address of the constraint expr, add all
2780
     sub-fields of the variable as well.  */
2781
  if (!address_p
2782
      && !vi->is_full_var)
2783
    {
2784
      for (; vi; vi = vi->next)
2785
        {
2786
          cexpr.var = vi->id;
2787
          VEC_safe_push (ce_s, heap, *results, &cexpr);
2788
        }
2789
      return;
2790
    }
2791
 
2792
  VEC_safe_push (ce_s, heap, *results, &cexpr);
2793
}
2794
 
2795
/* Process constraint T, performing various simplifications and then
2796
   adding it to our list of overall constraints.  */
2797
 
2798
static void
2799
process_constraint (constraint_t t)
2800
{
2801
  struct constraint_expr rhs = t->rhs;
2802
  struct constraint_expr lhs = t->lhs;
2803
 
2804
  gcc_assert (rhs.var < VEC_length (varinfo_t, varmap));
2805
  gcc_assert (lhs.var < VEC_length (varinfo_t, varmap));
2806
 
2807
  /* If we didn't get any useful constraint from the lhs we get
2808
     &ANYTHING as fallback from get_constraint_for.  Deal with
2809
     it here by turning it into *ANYTHING.  */
2810
  if (lhs.type == ADDRESSOF
2811
      && lhs.var == anything_id)
2812
    lhs.type = DEREF;
2813
 
2814
  /* ADDRESSOF on the lhs is invalid.  */
2815
  gcc_assert (lhs.type != ADDRESSOF);
2816
 
2817
  /* We shouldn't add constraints from things that cannot have pointers.
2818
     It's not completely trivial to avoid in the callers, so do it here.  */
2819
  if (rhs.type != ADDRESSOF
2820
      && !get_varinfo (rhs.var)->may_have_pointers)
2821
    return;
2822
 
2823
  /* Likewise adding to the solution of a non-pointer var isn't useful.  */
2824
  if (!get_varinfo (lhs.var)->may_have_pointers)
2825
    return;
2826
 
2827
  /* This can happen in our IR with things like n->a = *p */
2828
  if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
2829
    {
2830
      /* Split into tmp = *rhs, *lhs = tmp */
2831
      struct constraint_expr tmplhs;
2832
      tmplhs = new_scalar_tmp_constraint_exp ("doubledereftmp");
2833
      process_constraint (new_constraint (tmplhs, rhs));
2834
      process_constraint (new_constraint (lhs, tmplhs));
2835
    }
2836
  else if (rhs.type == ADDRESSOF && lhs.type == DEREF)
2837
    {
2838
      /* Split into tmp = &rhs, *lhs = tmp */
2839
      struct constraint_expr tmplhs;
2840
      tmplhs = new_scalar_tmp_constraint_exp ("derefaddrtmp");
2841
      process_constraint (new_constraint (tmplhs, rhs));
2842
      process_constraint (new_constraint (lhs, tmplhs));
2843
    }
2844
  else
2845
    {
2846
      gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
2847
      VEC_safe_push (constraint_t, heap, constraints, t);
2848
    }
2849
}
2850
 
2851
 
2852
/* Return the position, in bits, of FIELD_DECL from the beginning of its
2853
   structure.  */
2854
 
2855
static HOST_WIDE_INT
2856
bitpos_of_field (const tree fdecl)
2857
{
2858
  if (!host_integerp (DECL_FIELD_OFFSET (fdecl), 0)
2859
      || !host_integerp (DECL_FIELD_BIT_OFFSET (fdecl), 0))
2860
    return -1;
2861
 
2862
  return (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (fdecl)) * BITS_PER_UNIT
2863
          + TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (fdecl)));
2864
}
2865
 
2866
 
2867
/* Get constraint expressions for offsetting PTR by OFFSET.  Stores the
2868
   resulting constraint expressions in *RESULTS.  */
2869
 
2870
static void
2871
get_constraint_for_ptr_offset (tree ptr, tree offset,
2872
                               VEC (ce_s, heap) **results)
2873
{
2874
  struct constraint_expr c;
2875
  unsigned int j, n;
2876
  HOST_WIDE_INT rhsoffset;
2877
 
2878
  /* If we do not do field-sensitive PTA adding offsets to pointers
2879
     does not change the points-to solution.  */
2880
  if (!use_field_sensitive)
2881
    {
2882
      get_constraint_for_rhs (ptr, results);
2883
      return;
2884
    }
2885
 
2886
  /* If the offset is not a non-negative integer constant that fits
2887
     in a HOST_WIDE_INT, we have to fall back to a conservative
2888
     solution which includes all sub-fields of all pointed-to
2889
     variables of ptr.  */
2890
  if (offset == NULL_TREE
2891
      || TREE_CODE (offset) != INTEGER_CST)
2892
    rhsoffset = UNKNOWN_OFFSET;
2893
  else
2894
    {
2895
      /* Sign-extend the offset.  */
2896
      double_int soffset
2897
        = double_int_sext (tree_to_double_int (offset),
2898
                           TYPE_PRECISION (TREE_TYPE (offset)));
2899
      if (!double_int_fits_in_shwi_p (soffset))
2900
        rhsoffset = UNKNOWN_OFFSET;
2901
      else
2902
        {
2903
          /* Make sure the bit-offset also fits.  */
2904
          HOST_WIDE_INT rhsunitoffset = soffset.low;
2905
          rhsoffset = rhsunitoffset * BITS_PER_UNIT;
2906
          if (rhsunitoffset != rhsoffset / BITS_PER_UNIT)
2907
            rhsoffset = UNKNOWN_OFFSET;
2908
        }
2909
    }
2910
 
2911
  get_constraint_for_rhs (ptr, results);
2912
  if (rhsoffset == 0)
2913
    return;
2914
 
2915
  /* As we are eventually appending to the solution do not use
2916
     VEC_iterate here.  */
2917
  n = VEC_length (ce_s, *results);
2918
  for (j = 0; j < n; j++)
2919
    {
2920
      varinfo_t curr;
2921
      c = *VEC_index (ce_s, *results, j);
2922
      curr = get_varinfo (c.var);
2923
 
2924
      if (c.type == ADDRESSOF
2925
          /* If this varinfo represents a full variable just use it.  */
2926
          && curr->is_full_var)
2927
        c.offset = 0;
2928
      else if (c.type == ADDRESSOF
2929
               /* If we do not know the offset add all subfields.  */
2930
               && rhsoffset == UNKNOWN_OFFSET)
2931
        {
2932
          varinfo_t temp = lookup_vi_for_tree (curr->decl);
2933
          do
2934
            {
2935
              struct constraint_expr c2;
2936
              c2.var = temp->id;
2937
              c2.type = ADDRESSOF;
2938
              c2.offset = 0;
2939
              if (c2.var != c.var)
2940
                VEC_safe_push (ce_s, heap, *results, &c2);
2941
              temp = temp->next;
2942
            }
2943
          while (temp);
2944
        }
2945
      else if (c.type == ADDRESSOF)
2946
        {
2947
          varinfo_t temp;
2948
          unsigned HOST_WIDE_INT offset = curr->offset + rhsoffset;
2949
 
2950
          /* Search the sub-field which overlaps with the
2951
             pointed-to offset.  If the result is outside of the variable
2952
             we have to provide a conservative result, as the variable is
2953
             still reachable from the resulting pointer (even though it
2954
             technically cannot point to anything).  The last and first
2955
             sub-fields are such conservative results.
2956
             ???  If we always had a sub-field for &object + 1 then
2957
             we could represent this in a more precise way.  */
2958
          if (rhsoffset < 0
2959
              && curr->offset < offset)
2960
            offset = 0;
2961
          temp = first_or_preceding_vi_for_offset (curr, offset);
2962
 
2963
          /* If the found variable is not exactly at the pointed to
2964
             result, we have to include the next variable in the
2965
             solution as well.  Otherwise two increments by offset / 2
2966
             do not result in the same or a conservative superset
2967
             solution.  */
2968
          if (temp->offset != offset
2969
              && temp->next != NULL)
2970
            {
2971
              struct constraint_expr c2;
2972
              c2.var = temp->next->id;
2973
              c2.type = ADDRESSOF;
2974
              c2.offset = 0;
2975
              VEC_safe_push (ce_s, heap, *results, &c2);
2976
            }
2977
          c.var = temp->id;
2978
          c.offset = 0;
2979
        }
2980
      else
2981
        c.offset = rhsoffset;
2982
 
2983
      VEC_replace (ce_s, *results, j, &c);
2984
    }
2985
}
2986
 
2987
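/* For illustration only (hypothetical struct layout): given

     struct S { int *a; int *b; } s;

   a pointer expression equivalent to &s offset by the byte position of
   field b ends up here with an ADDRESSOF entry for s; the constant
   offset is scaled to bits and first_or_preceding_vi_for_offset moves
   the entry to the sub-field variable covering s.b.  For a plain SCALAR
   pointer entry the offset is simply recorded in the constraint and
   applied at solve time, and a non-constant offset forces
   UNKNOWN_OFFSET, which either adds all sub-fields (ADDRESSOF) or
   defers the decision to solve time.  */
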
 
2988
/* Given a COMPONENT_REF T, return the constraint_expr vector for it.
2989
   If address_p is true the result will have its address taken.
2990
   If lhs_p is true then the constraint expression is assumed to be used
2991
   as the lhs.  */
2992
 
2993
static void
2994
get_constraint_for_component_ref (tree t, VEC(ce_s, heap) **results,
2995
                                  bool address_p, bool lhs_p)
2996
{
2997
  tree orig_t = t;
2998
  HOST_WIDE_INT bitsize = -1;
2999
  HOST_WIDE_INT bitmaxsize = -1;
3000
  HOST_WIDE_INT bitpos;
3001
  tree forzero;
3002
  struct constraint_expr *result;
3003
 
3004
  /* Some people like to do cute things like take the address of
3005
     &0->a.b */
3006
  forzero = t;
3007
  while (handled_component_p (forzero)
3008
         || INDIRECT_REF_P (forzero)
3009
         || TREE_CODE (forzero) == MEM_REF)
3010
    forzero = TREE_OPERAND (forzero, 0);
3011
 
3012
  if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
3013
    {
3014
      struct constraint_expr temp;
3015
 
3016
      temp.offset = 0;
3017
      temp.var = integer_id;
3018
      temp.type = SCALAR;
3019
      VEC_safe_push (ce_s, heap, *results, &temp);
3020
      return;
3021
    }
3022
 
3023
  /* Handle type-punning through unions.  If we are extracting a pointer
3024
     from a union via a possibly type-punning access, that pointer
3025
     points to anything, similar to a conversion of an integer to
3026
     a pointer.  */
3027
  if (!lhs_p)
3028
    {
3029
      tree u;
3030
      for (u = t;
3031
           TREE_CODE (u) == COMPONENT_REF || TREE_CODE (u) == ARRAY_REF;
3032
           u = TREE_OPERAND (u, 0))
3033
        if (TREE_CODE (u) == COMPONENT_REF
3034
            && TREE_CODE (TREE_TYPE (TREE_OPERAND (u, 0))) == UNION_TYPE)
3035
          {
3036
            struct constraint_expr temp;
3037
 
3038
            temp.offset = 0;
3039
            temp.var = anything_id;
3040
            temp.type = ADDRESSOF;
3041
            VEC_safe_push (ce_s, heap, *results, &temp);
3042
            return;
3043
          }
3044
    }
3045
 
3046
  t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize);
3047
 
3048
  /* Pretend to take the address of the base; we'll take care of
3049
     adding the required subset of sub-fields below.  */
3050
  get_constraint_for_1 (t, results, true, lhs_p);
3051
  gcc_assert (VEC_length (ce_s, *results) == 1);
3052
  result = VEC_last (ce_s, *results);
3053
 
3054
  if (result->type == SCALAR
3055
      && get_varinfo (result->var)->is_full_var)
3056
    /* For single-field vars do not bother about the offset.  */
3057
    result->offset = 0;
3058
  else if (result->type == SCALAR)
3059
    {
3060
      /* In languages like C, you can access one past the end of an
3061
         array.  You aren't allowed to dereference it, so we can
3062
         ignore this constraint. When we handle pointer subtraction,
3063
         we may have to do something cute here.  */
3064
 
3065
      if ((unsigned HOST_WIDE_INT)bitpos < get_varinfo (result->var)->fullsize
3066
          && bitmaxsize != 0)
3067
        {
3068
          /* It's also not true that the constraint will actually start at the
3069
             right offset; it may start in some padding.  We only care about
3070
             setting the constraint to the first actual field it touches, so
3071
             walk to find it.  */
3072
          struct constraint_expr cexpr = *result;
3073
          varinfo_t curr;
3074
          VEC_pop (ce_s, *results);
3075
          cexpr.offset = 0;
3076
          for (curr = get_varinfo (cexpr.var); curr; curr = curr->next)
3077
            {
3078
              if (ranges_overlap_p (curr->offset, curr->size,
3079
                                    bitpos, bitmaxsize))
3080
                {
3081
                  cexpr.var = curr->id;
3082
                  VEC_safe_push (ce_s, heap, *results, &cexpr);
3083
                  if (address_p)
3084
                    break;
3085
                }
3086
            }
3087
          /* If we are going to take the address of this field then
3088
             to be able to compute reachability correctly add at least
3089
             the last field of the variable.  */
3090
          if (address_p
3091
              && VEC_length (ce_s, *results) == 0)
3092
            {
3093
              curr = get_varinfo (cexpr.var);
3094
              while (curr->next != NULL)
3095
                curr = curr->next;
3096
              cexpr.var = curr->id;
3097
              VEC_safe_push (ce_s, heap, *results, &cexpr);
3098
            }
3099
          else if (VEC_length (ce_s, *results) == 0)
3100
            /* Assert that we found *some* field there. The user couldn't be
3101
               accessing *only* padding.  */
3102
            /* Still the user could access one past the end of an array
3103
               embedded in a struct resulting in accessing *only* padding.  */
3104
            /* Or accessing only padding via type-punning to a type
3105
               that has a field just in padding space.  */
3106
            {
3107
              cexpr.type = SCALAR;
3108
              cexpr.var = anything_id;
3109
              cexpr.offset = 0;
3110
              VEC_safe_push (ce_s, heap, *results, &cexpr);
3111
            }
3112
        }
3113
      else if (bitmaxsize == 0)
3114
        {
3115
          if (dump_file && (dump_flags & TDF_DETAILS))
3116
            fprintf (dump_file, "Access to zero-sized part of variable,"
3117
                     "ignoring\n");
3118
        }
3119
      else
3120
        if (dump_file && (dump_flags & TDF_DETAILS))
3121
          fprintf (dump_file, "Access to past the end of variable, ignoring\n");
3122
    }
3123
  else if (result->type == DEREF)
3124
    {
3125
      /* If we do not know exactly where the access goes say so.  Note
3126
         that only for non-structure accesses do we know that we access
3127
         at most one subfield of any variable.  */
3128
      if (bitpos == -1
3129
          || bitsize != bitmaxsize
3130
          || AGGREGATE_TYPE_P (TREE_TYPE (orig_t))
3131
          || result->offset == UNKNOWN_OFFSET)
3132
        result->offset = UNKNOWN_OFFSET;
3133
      else
3134
        result->offset += bitpos;
3135
    }
3136
  else if (result->type == ADDRESSOF)
3137
    {
3138
      /* We can end up here for component references on a
3139
         VIEW_CONVERT_EXPR <>(&foobar).  */
3140
      result->type = SCALAR;
3141
      result->var = anything_id;
3142
      result->offset = 0;
3143
    }
3144
  else
3145
    gcc_unreachable ();
3146
}
3147
 
3148
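/* For illustration only (hypothetical source): for an access such as

     struct S { int *a; int *b; } s;
     ... = s.b;

   get_ref_base_and_extent returns the base s together with the bit
   position and size of the b field; the code above then replaces the
   single base entry with the sub-field variable(s) of s that overlap
   that bit range, so the resulting constraint names s.b rather than
   the whole of s.  */
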
 
3149
/* Dereference the constraint expression CONS, and return the result.
3150
   DEREF (ADDRESSOF) = SCALAR
3151
   DEREF (SCALAR) = DEREF
3152
   DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
3153
   This is needed so that we can handle dereferencing DEREF constraints.  */
3154
 
3155
static void
3156
do_deref (VEC (ce_s, heap) **constraints)
3157
{
3158
  struct constraint_expr *c;
3159
  unsigned int i = 0;
3160
 
3161
  FOR_EACH_VEC_ELT (ce_s, *constraints, i, c)
3162
    {
3163
      if (c->type == SCALAR)
3164
        c->type = DEREF;
3165
      else if (c->type == ADDRESSOF)
3166
        c->type = SCALAR;
3167
      else if (c->type == DEREF)
3168
        {
3169
          struct constraint_expr tmplhs;
3170
          tmplhs = new_scalar_tmp_constraint_exp ("dereftmp");
3171
          process_constraint (new_constraint (tmplhs, *c));
3172
          c->var = tmplhs.var;
3173
        }
3174
      else
3175
        gcc_unreachable ();
3176
    }
3177
}
3178
 
3179
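/* For illustration only (hypothetical source): a double dereference
   such as

     x = **pp;

   first yields DEREF *pp for the inner MEM_REF; do_deref then emits the
   helper constraint dereftmp = *pp through process_constraint and
   rewrites the entry to DEREF *dereftmp, so the outer dereference is
   again of the simple x = *y form that do_sd_constraint can handle.  */
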
/* Given a tree T, return the constraint expression for taking the
3180
   address of it.  */
3181
 
3182
static void
3183
get_constraint_for_address_of (tree t, VEC (ce_s, heap) **results)
3184
{
3185
  struct constraint_expr *c;
3186
  unsigned int i;
3187
 
3188
  get_constraint_for_1 (t, results, true, true);
3189
 
3190
  FOR_EACH_VEC_ELT (ce_s, *results, i, c)
3191
    {
3192
      if (c->type == DEREF)
3193
        c->type = SCALAR;
3194
      else
3195
        c->type = ADDRESSOF;
3196
    }
3197
}
3198
 
3199
/* Given a tree T, return the constraint expression for it.  */
3200
 
3201
static void
3202
get_constraint_for_1 (tree t, VEC (ce_s, heap) **results, bool address_p,
3203
                      bool lhs_p)
3204
{
3205
  struct constraint_expr temp;
3206
 
3207
  /* x = integer is all glommed to a single variable, which doesn't
3208
     point to anything by itself.  That is, of course, unless it is an
3209
     integer constant being treated as a pointer, in which case, we
3210
     will return that this is really the addressof anything.  This
3211
     happens below, since it will fall into the default case. The only
3212
     case we know something about an integer treated like a pointer is
3213
     when it is the NULL pointer, and then we just say it points to
3214
     NULL.
3215
 
3216
     Do not do that if -fno-delete-null-pointer-checks though, because
3217
     in that case *NULL does not fail, so it _should_ alias *anything.
3218
     It is not worth adding a new option or renaming the existing one,
3219
     since this case is relatively obscure.  */
3220
  if ((TREE_CODE (t) == INTEGER_CST
3221
       && integer_zerop (t))
3222
      /* The only valid CONSTRUCTORs in gimple with pointer typed
3223
         elements are zero-initializers.  But in IPA mode we also
3224
         process global initializers, so verify at least.  */
3225
      || (TREE_CODE (t) == CONSTRUCTOR
3226
          && CONSTRUCTOR_NELTS (t) == 0))
3227
    {
3228
      if (flag_delete_null_pointer_checks)
3229
        temp.var = nothing_id;
3230
      else
3231
        temp.var = nonlocal_id;
3232
      temp.type = ADDRESSOF;
3233
      temp.offset = 0;
3234
      VEC_safe_push (ce_s, heap, *results, &temp);
3235
      return;
3236
    }
3237
 
3238
  /* String constants are read-only.  */
3239
  if (TREE_CODE (t) == STRING_CST)
3240
    {
3241
      temp.var = readonly_id;
3242
      temp.type = SCALAR;
3243
      temp.offset = 0;
3244
      VEC_safe_push (ce_s, heap, *results, &temp);
3245
      return;
3246
    }
3247
 
3248
  switch (TREE_CODE_CLASS (TREE_CODE (t)))
3249
    {
3250
    case tcc_expression:
3251
      {
3252
        switch (TREE_CODE (t))
3253
          {
3254
          case ADDR_EXPR:
3255
            get_constraint_for_address_of (TREE_OPERAND (t, 0), results);
3256
            return;
3257
          default:;
3258
          }
3259
        break;
3260
      }
3261
    case tcc_reference:
3262
      {
3263
        switch (TREE_CODE (t))
3264
          {
3265
          case MEM_REF:
3266
            {
3267
              struct constraint_expr cs;
3268
              varinfo_t vi, curr;
3269
              get_constraint_for_ptr_offset (TREE_OPERAND (t, 0),
3270
                                             TREE_OPERAND (t, 1), results);
3271
              do_deref (results);
3272
 
3273
              /* If we are not taking the address then make sure to process
3274
                 all subvariables we might access.  */
3275
              if (address_p)
3276
                return;
3277
 
3278
              cs = *VEC_last (ce_s, *results);
3279
              if (cs.type == DEREF
3280
                  && type_can_have_subvars (TREE_TYPE (t)))
3281
                {
3282
                  /* For dereferences this means we have to defer it
3283
                     to solving time.  */
3284
                  VEC_last (ce_s, *results)->offset = UNKNOWN_OFFSET;
3285
                  return;
3286
                }
3287
              if (cs.type != SCALAR)
3288
                return;
3289
 
3290
              vi = get_varinfo (cs.var);
3291
              curr = vi->next;
3292
              if (!vi->is_full_var
3293
                  && curr)
3294
                {
3295
                  unsigned HOST_WIDE_INT size;
3296
                  if (host_integerp (TYPE_SIZE (TREE_TYPE (t)), 1))
3297
                    size = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (t)));
3298
                  else
3299
                    size = -1;
3300
                  for (; curr; curr = curr->next)
3301
                    {
3302
                      if (curr->offset - vi->offset < size)
3303
                        {
3304
                          cs.var = curr->id;
3305
                          VEC_safe_push (ce_s, heap, *results, &cs);
3306
                        }
3307
                      else
3308
                        break;
3309
                    }
3310
                }
3311
              return;
3312
            }
3313
          case ARRAY_REF:
3314
          case ARRAY_RANGE_REF:
3315
          case COMPONENT_REF:
3316
            get_constraint_for_component_ref (t, results, address_p, lhs_p);
3317
            return;
3318
          case VIEW_CONVERT_EXPR:
3319
            get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p,
3320
                                  lhs_p);
3321
            return;
3322
          /* We are missing handling for TARGET_MEM_REF here.  */
3323
          default:;
3324
          }
3325
        break;
3326
      }
3327
    case tcc_exceptional:
3328
      {
3329
        switch (TREE_CODE (t))
3330
          {
3331
          case SSA_NAME:
3332
            {
3333
              get_constraint_for_ssa_var (t, results, address_p);
3334
              return;
3335
            }
3336
          case CONSTRUCTOR:
3337
            {
3338
              unsigned int i;
3339
              tree val;
3340
              VEC (ce_s, heap) *tmp = NULL;
3341
              FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
3342
                {
3343
                  struct constraint_expr *rhsp;
3344
                  unsigned j;
3345
                  get_constraint_for_1 (val, &tmp, address_p, lhs_p);
3346
                  FOR_EACH_VEC_ELT (ce_s, tmp, j, rhsp)
3347
                    VEC_safe_push (ce_s, heap, *results, rhsp);
3348
                  VEC_truncate (ce_s, tmp, 0);
3349
                }
3350
              VEC_free (ce_s, heap, tmp);
3351
              /* We do not know whether the constructor was complete,
3352
                 so technically we have to add &NOTHING or &ANYTHING
3353
                 like we do for an empty constructor as well.  */
3354
              return;
3355
            }
3356
          default:;
3357
          }
3358
        break;
3359
      }
3360
    case tcc_declaration:
3361
      {
3362
        get_constraint_for_ssa_var (t, results, address_p);
3363
        return;
3364
      }
3365
    case tcc_constant:
3366
      {
3367
        /* We cannot refer to automatic variables through constants.  */
3368
        temp.type = ADDRESSOF;
3369
        temp.var = nonlocal_id;
3370
        temp.offset = 0;
3371
        VEC_safe_push (ce_s, heap, *results, &temp);
3372
        return;
3373
      }
3374
    default:;
3375
    }
3376
 
3377
  /* The default fallback is a constraint from anything.  */
3378
  temp.type = ADDRESSOF;
3379
  temp.var = anything_id;
3380
  temp.offset = 0;
3381
  VEC_safe_push (ce_s, heap, *results, &temp);
3382
}
3383
 
3384
/* Given a gimple tree T, return the constraint expression vector for it.  */
3385
 
3386
static void
3387
get_constraint_for (tree t, VEC (ce_s, heap) **results)
3388
{
3389
  gcc_assert (VEC_length (ce_s, *results) == 0);
3390
 
3391
  get_constraint_for_1 (t, results, false, true);
3392
}
3393
 
3394
/* Given a gimple tree T, return the constraint expression vector for it
3395
   to be used as the rhs of a constraint.  */
3396
 
3397
static void
3398
get_constraint_for_rhs (tree t, VEC (ce_s, heap) **results)
3399
{
3400
  gcc_assert (VEC_length (ce_s, *results) == 0);
3401
 
3402
  get_constraint_for_1 (t, results, false, false);
3403
}
3404
 
3405
 
3406
/* Efficiently generates constraints from all entries in *RHSC to all
3407
   entries in *LHSC.  */
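/* Editorial note (not in the original source): when both vectors contain
   more than one element, the code below funnels everything through a
   single "allalltmp" temporary, emitting |LHSC| + |RHSC| constraints
   instead of |LHSC| * |RHSC|; the solution is unchanged because copy
   constraints compose transitively.  */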
3408
 
3409
static void
3410
process_all_all_constraints (VEC (ce_s, heap) *lhsc, VEC (ce_s, heap) *rhsc)
3411
{
3412
  struct constraint_expr *lhsp, *rhsp;
3413
  unsigned i, j;
3414
 
3415
  if (VEC_length (ce_s, lhsc) <= 1
3416
      || VEC_length (ce_s, rhsc) <= 1)
3417
    {
3418
      FOR_EACH_VEC_ELT (ce_s, lhsc, i, lhsp)
3419
        FOR_EACH_VEC_ELT (ce_s, rhsc, j, rhsp)
3420
          process_constraint (new_constraint (*lhsp, *rhsp));
3421
    }
3422
  else
3423
    {
3424
      struct constraint_expr tmp;
3425
      tmp = new_scalar_tmp_constraint_exp ("allalltmp");
3426
      FOR_EACH_VEC_ELT (ce_s, rhsc, i, rhsp)
3427
        process_constraint (new_constraint (tmp, *rhsp));
3428
      FOR_EACH_VEC_ELT (ce_s, lhsc, i, lhsp)
3429
        process_constraint (new_constraint (*lhsp, tmp));
3430
    }
3431
}
3432
 
3433
/* Handle aggregate copies by expanding into copies of the respective
3434
   fields of the structures.  */
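/* Editorial example (not in the original source): for a copy "a = b" of
   two structures with pointer fields, the field-sensitive branch below
   pairs up sub-variables by overlapping offsets and emits roughly
   a.f0 = b.f0, a.f1 = b.f1, ...; if either side is accessed through a
   DEREF, the copy degrades to an all-to-all copy using UNKNOWN_OFFSET.  */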
3435
 
3436
static void
3437
do_structure_copy (tree lhsop, tree rhsop)
3438
{
3439
  struct constraint_expr *lhsp, *rhsp;
3440
  VEC (ce_s, heap) *lhsc = NULL, *rhsc = NULL;
3441
  unsigned j;
3442
 
3443
  get_constraint_for (lhsop, &lhsc);
3444
  get_constraint_for_rhs (rhsop, &rhsc);
3445
  lhsp = VEC_index (ce_s, lhsc, 0);
3446
  rhsp = VEC_index (ce_s, rhsc, 0);
3447
  if (lhsp->type == DEREF
3448
      || (lhsp->type == ADDRESSOF && lhsp->var == anything_id)
3449
      || rhsp->type == DEREF)
3450
    {
3451
      if (lhsp->type == DEREF)
3452
        {
3453
          gcc_assert (VEC_length (ce_s, lhsc) == 1);
3454
          lhsp->offset = UNKNOWN_OFFSET;
3455
        }
3456
      if (rhsp->type == DEREF)
3457
        {
3458
          gcc_assert (VEC_length (ce_s, rhsc) == 1);
3459
          rhsp->offset = UNKNOWN_OFFSET;
3460
        }
3461
      process_all_all_constraints (lhsc, rhsc);
3462
    }
3463
  else if (lhsp->type == SCALAR
3464
           && (rhsp->type == SCALAR
3465
               || rhsp->type == ADDRESSOF))
3466
    {
3467
      HOST_WIDE_INT lhssize, lhsmaxsize, lhsoffset;
3468
      HOST_WIDE_INT rhssize, rhsmaxsize, rhsoffset;
3469
      unsigned k = 0;
3470
      get_ref_base_and_extent (lhsop, &lhsoffset, &lhssize, &lhsmaxsize);
3471
      get_ref_base_and_extent (rhsop, &rhsoffset, &rhssize, &rhsmaxsize);
3472
      for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp);)
3473
        {
3474
          varinfo_t lhsv, rhsv;
3475
          rhsp = VEC_index (ce_s, rhsc, k);
3476
          lhsv = get_varinfo (lhsp->var);
3477
          rhsv = get_varinfo (rhsp->var);
3478
          if (lhsv->may_have_pointers
3479
              && (lhsv->is_full_var
3480
                  || rhsv->is_full_var
3481
                  || ranges_overlap_p (lhsv->offset + rhsoffset, lhsv->size,
3482
                                       rhsv->offset + lhsoffset, rhsv->size)))
3483
            process_constraint (new_constraint (*lhsp, *rhsp));
3484
          if (!rhsv->is_full_var
3485
              && (lhsv->is_full_var
3486
                  || (lhsv->offset + rhsoffset + lhsv->size
3487
                      > rhsv->offset + lhsoffset + rhsv->size)))
3488
            {
3489
              ++k;
3490
              if (k >= VEC_length (ce_s, rhsc))
3491
                break;
3492
            }
3493
          else
3494
            ++j;
3495
        }
3496
    }
3497
  else
3498
    gcc_unreachable ();
3499
 
3500
  VEC_free (ce_s, heap, lhsc);
3501
  VEC_free (ce_s, heap, rhsc);
3502
}
3503
 
3504
/* Create constraints ID = { rhsc }.  */
3505
 
3506
static void
3507
make_constraints_to (unsigned id, VEC(ce_s, heap) *rhsc)
3508
{
3509
  struct constraint_expr *c;
3510
  struct constraint_expr includes;
3511
  unsigned int j;
3512
 
3513
  includes.var = id;
3514
  includes.offset = 0;
3515
  includes.type = SCALAR;
3516
 
3517
  FOR_EACH_VEC_ELT (ce_s, rhsc, j, c)
3518
    process_constraint (new_constraint (includes, *c));
3519
}
3520
 
3521
/* Create a constraint ID = OP.  */
3522
 
3523
static void
3524
make_constraint_to (unsigned id, tree op)
3525
{
3526
  VEC(ce_s, heap) *rhsc = NULL;
3527
  get_constraint_for_rhs (op, &rhsc);
3528
  make_constraints_to (id, rhsc);
3529
  VEC_free (ce_s, heap, rhsc);
3530
}
3531
 
3532
/* Create a constraint ID = &FROM.  */
3533
 
3534
static void
3535
make_constraint_from (varinfo_t vi, int from)
3536
{
3537
  struct constraint_expr lhs, rhs;
3538
 
3539
  lhs.var = vi->id;
3540
  lhs.offset = 0;
3541
  lhs.type = SCALAR;
3542
 
3543
  rhs.var = from;
3544
  rhs.offset = 0;
3545
  rhs.type = ADDRESSOF;
3546
  process_constraint (new_constraint (lhs, rhs));
3547
}
3548
 
3549
/* Create a constraint ID = FROM.  */
3550
 
3551
static void
3552
make_copy_constraint (varinfo_t vi, int from)
3553
{
3554
  struct constraint_expr lhs, rhs;
3555
 
3556
  lhs.var = vi->id;
3557
  lhs.offset = 0;
3558
  lhs.type = SCALAR;
3559
 
3560
  rhs.var = from;
3561
  rhs.offset = 0;
3562
  rhs.type = SCALAR;
3563
  process_constraint (new_constraint (lhs, rhs));
3564
}
3565
 
3566
/* Make constraints necessary to make OP escape.  */
3567
 
3568
static void
3569
make_escape_constraint (tree op)
3570
{
3571
  make_constraint_to (escaped_id, op);
3572
}
3573
 
3574
/* Add constraints so that the solution of VI is transitively closed.  */
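/* Editorial note (not in the original source): the two constraints
   generated below, VAR = *VAR and VAR = VAR + UNKNOWN, close the solution
   of VAR under dereferencing and arbitrary field offsets, so everything
   transitively reachable from VAR ends up in VAR's points-to set.  */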
3575
 
3576
static void
3577
make_transitive_closure_constraints (varinfo_t vi)
3578
{
3579
  struct constraint_expr lhs, rhs;
3580
 
3581
  /* VAR = *VAR;  */
3582
  lhs.type = SCALAR;
3583
  lhs.var = vi->id;
3584
  lhs.offset = 0;
3585
  rhs.type = DEREF;
3586
  rhs.var = vi->id;
3587
  rhs.offset = 0;
3588
  process_constraint (new_constraint (lhs, rhs));
3589
 
3590
  /* VAR = VAR + UNKNOWN;  */
3591
  lhs.type = SCALAR;
3592
  lhs.var = vi->id;
3593
  lhs.offset = 0;
3594
  rhs.type = SCALAR;
3595
  rhs.var = vi->id;
3596
  rhs.offset = UNKNOWN_OFFSET;
3597
  process_constraint (new_constraint (lhs, rhs));
3598
}
3599
 
3600
/* Temporary storage for fake var decls.  */
3601
struct obstack fake_var_decl_obstack;
3602
 
3603
/* Build a fake VAR_DECL acting as referrer to a DECL_UID.  */
3604
 
3605
static tree
3606
build_fake_var_decl (tree type)
3607
{
3608
  tree decl = (tree) XOBNEW (&fake_var_decl_obstack, struct tree_var_decl);
3609
  memset (decl, 0, sizeof (struct tree_var_decl));
3610
  TREE_SET_CODE (decl, VAR_DECL);
3611
  TREE_TYPE (decl) = type;
3612
  DECL_UID (decl) = allocate_decl_uid ();
3613
  SET_DECL_PT_UID (decl, -1);
3614
  layout_decl (decl, 0);
3615
  return decl;
3616
}
3617
 
3618
/* Create a new artificial heap variable with NAME.
3619
   Return the created variable.  */
3620
 
3621
static varinfo_t
3622
make_heapvar (const char *name)
3623
{
3624
  varinfo_t vi;
3625
  tree heapvar;
3626
 
3627
  heapvar = build_fake_var_decl (ptr_type_node);
3628
  DECL_EXTERNAL (heapvar) = 1;
3629
 
3630
  vi = new_var_info (heapvar, name);
3631
  vi->is_artificial_var = true;
3632
  vi->is_heap_var = true;
3633
  vi->is_unknown_size_var = true;
3634
  vi->offset = 0;
3635
  vi->fullsize = ~0;
3636
  vi->size = ~0;
3637
  vi->is_full_var = true;
3638
  insert_vi_for_tree (heapvar, vi);
3639
 
3640
  return vi;
3641
}
3642
 
3643
/* Create a new artificial heap variable with NAME and make a
3644
   constraint from it to LHS.  Set flags according to a tag used
3645
   for tracking restrict pointers.  */
3646
 
3647
static varinfo_t
3648
make_constraint_from_restrict (varinfo_t lhs, const char *name)
3649
{
3650
  varinfo_t vi = make_heapvar (name);
3651
  vi->is_global_var = 1;
3652
  vi->may_have_pointers = 1;
3653
  make_constraint_from (lhs, vi->id);
3654
  return vi;
3655
}
3656
 
3657
/* Create a new artificial heap variable with NAME and make a
3658
   constraint from it to LHS.  Set flags according to a tag used
3659
   for tracking restrict pointers and make the artificial heap
3660
   point to global memory.  */
3661
 
3662
static varinfo_t
3663
make_constraint_from_global_restrict (varinfo_t lhs, const char *name)
3664
{
3665
  varinfo_t vi = make_constraint_from_restrict (lhs, name);
3666
  make_copy_constraint (vi, nonlocal_id);
3667
  return vi;
3668
}
3669
 
3670
/* In IPA mode there are varinfos for different aspects of each
3671
   function designator.  One for the points-to set of the return
3672
   value, one for the variables that are clobbered by the function,
3673
   one for its uses and one for each parameter (including a single
3674
   glob for remaining variadic arguments).  */
3675
 
3676
enum { fi_clobbers = 1, fi_uses = 2,
3677
       fi_static_chain = 3, fi_result = 4, fi_parm_base = 5 };
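/* Editorial note (not in the original source): a function info is thus
   laid out with sub-variables at offsets 1 (clobbers), 2 (uses),
   3 (static chain), 4 (result) and fi_parm_base + N for the Nth
   parameter (counting from zero); get_function_part_constraint maps a
   requested part to the matching sub-variable.  */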
3678
 
3679
/* Get a constraint for the requested part of a function designator FI
3680
   when operating in IPA mode.  */
3681
 
3682
static struct constraint_expr
3683
get_function_part_constraint (varinfo_t fi, unsigned part)
3684
{
3685
  struct constraint_expr c;
3686
 
3687
  gcc_assert (in_ipa_mode);
3688
 
3689
  if (fi->id == anything_id)
3690
    {
3691
      /* ???  We probably should have a ANYFN special variable.  */
3692
      c.var = anything_id;
3693
      c.offset = 0;
3694
      c.type = SCALAR;
3695
    }
3696
  else if (TREE_CODE (fi->decl) == FUNCTION_DECL)
3697
    {
3698
      varinfo_t ai = first_vi_for_offset (fi, part);
3699
      if (ai)
3700
        c.var = ai->id;
3701
      else
3702
        c.var = anything_id;
3703
      c.offset = 0;
3704
      c.type = SCALAR;
3705
    }
3706
  else
3707
    {
3708
      c.var = fi->id;
3709
      c.offset = part;
3710
      c.type = DEREF;
3711
    }
3712
 
3713
  return c;
3714
}
3715
 
3716
/* For non-IPA mode, generate constraints necessary for a call on the
3717
   RHS.  */
3718
 
3719
static void
3720
handle_rhs_call (gimple stmt, VEC(ce_s, heap) **results)
3721
{
3722
  struct constraint_expr rhsc;
3723
  unsigned i;
3724
  bool returns_uses = false;
3725
 
3726
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
3727
    {
3728
      tree arg = gimple_call_arg (stmt, i);
3729
      int flags = gimple_call_arg_flags (stmt, i);
3730
 
3731
      /* If the argument is not used we can ignore it.  */
3732
      if (flags & EAF_UNUSED)
3733
        continue;
3734
 
3735
      /* As we compute ESCAPED context-insensitive we do not gain
3736
         any precision with just EAF_NOCLOBBER but not EAF_NOESCAPE
3737
         set.  The argument would still get clobbered through the
3738
         escape solution.
3739
         ???  We might get away with less (and more precise) constraints
3740
         if using a temporary for transitively closing things.  */
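      /* Editorial summary (not in the original source): arguments that
         may not escape only feed the per-call uses (and, unless also
         EAF_NOCLOBBER, the per-call clobbers) solutions, transitively
         closed when not EAF_DIRECT; all remaining arguments are added to
         ESCAPED via make_escape_constraint.  */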
3741
      if ((flags & EAF_NOCLOBBER)
3742
           && (flags & EAF_NOESCAPE))
3743
        {
3744
          varinfo_t uses = get_call_use_vi (stmt);
3745
          if (!(flags & EAF_DIRECT))
3746
            make_transitive_closure_constraints (uses);
3747
          make_constraint_to (uses->id, arg);
3748
          returns_uses = true;
3749
        }
3750
      else if (flags & EAF_NOESCAPE)
3751
        {
3752
          varinfo_t uses = get_call_use_vi (stmt);
3753
          varinfo_t clobbers = get_call_clobber_vi (stmt);
3754
          if (!(flags & EAF_DIRECT))
3755
            {
3756
              make_transitive_closure_constraints (uses);
3757
              make_transitive_closure_constraints (clobbers);
3758
            }
3759
          make_constraint_to (uses->id, arg);
3760
          make_constraint_to (clobbers->id, arg);
3761
          returns_uses = true;
3762
        }
3763
      else
3764
        make_escape_constraint (arg);
3765
    }
3766
 
3767
  /* If we added to the call's uses solution make sure we account for
3768
     pointers to it to be returned.  */
3769
  if (returns_uses)
3770
    {
3771
      rhsc.var = get_call_use_vi (stmt)->id;
3772
      rhsc.offset = 0;
3773
      rhsc.type = SCALAR;
3774
      VEC_safe_push (ce_s, heap, *results, &rhsc);
3775
    }
3776
 
3777
  /* The static chain escapes as well.  */
3778
  if (gimple_call_chain (stmt))
3779
    make_escape_constraint (gimple_call_chain (stmt));
3780
 
3781
  /* And if we applied NRV the address of the return slot escapes as well.  */
3782
  if (gimple_call_return_slot_opt_p (stmt)
3783
      && gimple_call_lhs (stmt) != NULL_TREE
3784
      && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
3785
    {
3786
      VEC(ce_s, heap) *tmpc = NULL;
3787
      struct constraint_expr lhsc, *c;
3788
      get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc);
3789
      lhsc.var = escaped_id;
3790
      lhsc.offset = 0;
3791
      lhsc.type = SCALAR;
3792
      FOR_EACH_VEC_ELT (ce_s, tmpc, i, c)
3793
        process_constraint (new_constraint (lhsc, *c));
3794
      VEC_free(ce_s, heap, tmpc);
3795
    }
3796
 
3797
  /* Regular functions return nonlocal memory.  */
3798
  rhsc.var = nonlocal_id;
3799
  rhsc.offset = 0;
3800
  rhsc.type = SCALAR;
3801
  VEC_safe_push (ce_s, heap, *results, &rhsc);
3802
}
3803
 
3804
/* For non-IPA mode, generate constraints necessary for a call
3805
   that returns a pointer and assigns it to LHS.  This simply makes
3806
   the LHS point to global and escaped variables.  */
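/* Editorial note (not in the original source): two special cases are
   handled below: a call returning one of its arguments unmodified
   (ERF_RETURNS_ARG) copies that argument's constraints to the LHS, and a
   malloc-like call (ERF_NOALIAS) makes the LHS point to a fresh "HEAP"
   variable instead.  */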
3807
 
3808
static void
3809
handle_lhs_call (gimple stmt, tree lhs, int flags, VEC(ce_s, heap) *rhsc,
3810
                 tree fndecl)
3811
{
3812
  VEC(ce_s, heap) *lhsc = NULL;
3813
 
3814
  get_constraint_for (lhs, &lhsc);
3815
  /* If the store is to a global decl make sure to
3816
     add proper escape constraints.  */
3817
  lhs = get_base_address (lhs);
3818
  if (lhs
3819
      && DECL_P (lhs)
3820
      && is_global_var (lhs))
3821
    {
3822
      struct constraint_expr tmpc;
3823
      tmpc.var = escaped_id;
3824
      tmpc.offset = 0;
3825
      tmpc.type = SCALAR;
3826
      VEC_safe_push (ce_s, heap, lhsc, &tmpc);
3827
    }
3828
 
3829
  /* If the call returns an argument unmodified override the rhs
3830
     constraints.  */
3831
  flags = gimple_call_return_flags (stmt);
3832
  if (flags & ERF_RETURNS_ARG
3833
      && (flags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (stmt))
3834
    {
3835
      tree arg;
3836
      rhsc = NULL;
3837
      arg = gimple_call_arg (stmt, flags & ERF_RETURN_ARG_MASK);
3838
      get_constraint_for (arg, &rhsc);
3839
      process_all_all_constraints (lhsc, rhsc);
3840
      VEC_free (ce_s, heap, rhsc);
3841
    }
3842
  else if (flags & ERF_NOALIAS)
3843
    {
3844
      varinfo_t vi;
3845
      struct constraint_expr tmpc;
3846
      rhsc = NULL;
3847
      vi = make_heapvar ("HEAP");
3848
      /* We delay marking allocated storage global until we know if
3849
         it escapes.  */
3850
      DECL_EXTERNAL (vi->decl) = 0;
3851
      vi->is_global_var = 0;
3852
      /* If this is not a real malloc call assume the memory was
3853
         initialized and thus may point to global memory.  All
3854
         builtin functions with the malloc attribute behave in a sane way.  */
3855
      if (!fndecl
3856
          || DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_NORMAL)
3857
        make_constraint_from (vi, nonlocal_id);
3858
      tmpc.var = vi->id;
3859
      tmpc.offset = 0;
3860
      tmpc.type = ADDRESSOF;
3861
      VEC_safe_push (ce_s, heap, rhsc, &tmpc);
3862
    }
3863
 
3864
  process_all_all_constraints (lhsc, rhsc);
3865
 
3866
  VEC_free (ce_s, heap, lhsc);
3867
}
3868
 
3869
/* For non-IPA mode, generate constraints necessary for a call of a
3870
   const function that returns a pointer in the statement STMT.  */
3871
 
3872
static void
3873
handle_const_call (gimple stmt, VEC(ce_s, heap) **results)
3874
{
3875
  struct constraint_expr rhsc;
3876
  unsigned int k;
3877
 
3878
  /* Treat nested const functions the same as pure functions as far
3879
     as the static chain is concerned.  */
3880
  if (gimple_call_chain (stmt))
3881
    {
3882
      varinfo_t uses = get_call_use_vi (stmt);
3883
      make_transitive_closure_constraints (uses);
3884
      make_constraint_to (uses->id, gimple_call_chain (stmt));
3885
      rhsc.var = uses->id;
3886
      rhsc.offset = 0;
3887
      rhsc.type = SCALAR;
3888
      VEC_safe_push (ce_s, heap, *results, &rhsc);
3889
    }
3890
 
3891
  /* May return arguments.  */
3892
  for (k = 0; k < gimple_call_num_args (stmt); ++k)
3893
    {
3894
      tree arg = gimple_call_arg (stmt, k);
3895
      VEC(ce_s, heap) *argc = NULL;
3896
      unsigned i;
3897
      struct constraint_expr *argp;
3898
      get_constraint_for_rhs (arg, &argc);
3899
      FOR_EACH_VEC_ELT (ce_s, argc, i, argp)
3900
        VEC_safe_push (ce_s, heap, *results, argp);
3901
      VEC_free(ce_s, heap, argc);
3902
    }
3903
 
3904
  /* May return addresses of globals.  */
3905
  rhsc.var = nonlocal_id;
3906
  rhsc.offset = 0;
3907
  rhsc.type = ADDRESSOF;
3908
  VEC_safe_push (ce_s, heap, *results, &rhsc);
3909
}
3910
 
3911
/* For non-IPA mode, generate constraints necessary for a call to a
3912
   pure function in statement STMT.  */
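/* Editorial note (not in the original source): memory reachable from the
   arguments and the static chain is added to the per-call uses solution,
   and the return value may point into that call-used memory or into
   NONLOCAL.  */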
3913
 
3914
static void
3915
handle_pure_call (gimple stmt, VEC(ce_s, heap) **results)
3916
{
3917
  struct constraint_expr rhsc;
3918
  unsigned i;
3919
  varinfo_t uses = NULL;
3920
 
3921
  /* Memory reached from pointer arguments is call-used.  */
3922
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
3923
    {
3924
      tree arg = gimple_call_arg (stmt, i);
3925
      if (!uses)
3926
        {
3927
          uses = get_call_use_vi (stmt);
3928
          make_transitive_closure_constraints (uses);
3929
        }
3930
      make_constraint_to (uses->id, arg);
3931
    }
3932
 
3933
  /* The static chain is used as well.  */
3934
  if (gimple_call_chain (stmt))
3935
    {
3936
      if (!uses)
3937
        {
3938
          uses = get_call_use_vi (stmt);
3939
          make_transitive_closure_constraints (uses);
3940
        }
3941
      make_constraint_to (uses->id, gimple_call_chain (stmt));
3942
    }
3943
 
3944
  /* Pure functions may return call-used and nonlocal memory.  */
3945
  if (uses)
3946
    {
3947
      rhsc.var = uses->id;
3948
      rhsc.offset = 0;
3949
      rhsc.type = SCALAR;
3950
      VEC_safe_push (ce_s, heap, *results, &rhsc);
3951
    }
3952
  rhsc.var = nonlocal_id;
3953
  rhsc.offset = 0;
3954
  rhsc.type = SCALAR;
3955
  VEC_safe_push (ce_s, heap, *results, &rhsc);
3956
}
3957
 
3958
 
3959
/* Return the varinfo for the callee of CALL.  */
3960
 
3961
static varinfo_t
3962
get_fi_for_callee (gimple call)
3963
{
3964
  tree decl, fn = gimple_call_fn (call);
3965
 
3966
  if (fn && TREE_CODE (fn) == OBJ_TYPE_REF)
3967
    fn = OBJ_TYPE_REF_EXPR (fn);
3968
 
3969
  /* If we can directly resolve the function being called, do so.
3970
     Otherwise, it must be some sort of indirect expression that
3971
     we should still be able to handle.  */
3972
  decl = gimple_call_addr_fndecl (fn);
3973
  if (decl)
3974
    return get_vi_for_tree (decl);
3975
 
3976
  /* If the function is anything other than a SSA name pointer we have no
3977
     clue and should be getting ANYFN (well, ANYTHING for now).  */
3978
  if (!fn || TREE_CODE (fn) != SSA_NAME)
3979
    return get_varinfo (anything_id);
3980
 
3981
  if ((TREE_CODE (SSA_NAME_VAR (fn)) == PARM_DECL
3982
       || TREE_CODE (SSA_NAME_VAR (fn)) == RESULT_DECL)
3983
      && SSA_NAME_IS_DEFAULT_DEF (fn))
3984
    fn = SSA_NAME_VAR (fn);
3985
 
3986
  return get_vi_for_tree (fn);
3987
}
3988
 
3989
/* Create constraints for the builtin call T.  Return true if the call
3990
   was handled, otherwise false.  */
3991
 
3992
static bool
3993
find_func_aliases_for_builtin_call (gimple t)
3994
{
3995
  tree fndecl = gimple_call_fndecl (t);
3996
  VEC(ce_s, heap) *lhsc = NULL;
3997
  VEC(ce_s, heap) *rhsc = NULL;
3998
  varinfo_t fi;
3999
 
4000
  if (fndecl != NULL_TREE
4001
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
4002
    /* ???  All builtins that are handled here need to be handled
4003
       in the alias-oracle query functions explicitly!  */
4004
    switch (DECL_FUNCTION_CODE (fndecl))
4005
      {
4006
      /* All the following functions return a pointer to the same object
4007
         as their first argument points to.  The functions do not add
4008
         to the ESCAPED solution.  The functions make the memory pointed
4009
         to by the first argument point to what the memory pointed to by
4010
         the second argument points to.  */
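      /* Editorial example (not in the original source): for
         "p = memcpy (d, s, n)" the handling below makes p point to what d
         points to and emits *d = *s (through get_constraint_for_ptr_offset
         and do_deref) for the copied pointer contents.  */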
4011
      case BUILT_IN_STRCPY:
4012
      case BUILT_IN_STRNCPY:
4013
      case BUILT_IN_BCOPY:
4014
      case BUILT_IN_MEMCPY:
4015
      case BUILT_IN_MEMMOVE:
4016
      case BUILT_IN_MEMPCPY:
4017
      case BUILT_IN_STPCPY:
4018
      case BUILT_IN_STPNCPY:
4019
      case BUILT_IN_STRCAT:
4020
      case BUILT_IN_STRNCAT:
4021
      case BUILT_IN_STRCPY_CHK:
4022
      case BUILT_IN_STRNCPY_CHK:
4023
      case BUILT_IN_MEMCPY_CHK:
4024
      case BUILT_IN_MEMMOVE_CHK:
4025
      case BUILT_IN_MEMPCPY_CHK:
4026
      case BUILT_IN_STPCPY_CHK:
4027
      case BUILT_IN_STPNCPY_CHK:
4028
      case BUILT_IN_STRCAT_CHK:
4029
      case BUILT_IN_STRNCAT_CHK:
4030
      case BUILT_IN_TM_MEMCPY:
4031
      case BUILT_IN_TM_MEMMOVE:
4032
        {
4033
          tree res = gimple_call_lhs (t);
4034
          tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
4035
                                           == BUILT_IN_BCOPY ? 1 : 0));
4036
          tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
4037
                                          == BUILT_IN_BCOPY ? 0 : 1));
4038
          if (res != NULL_TREE)
4039
            {
4040
              get_constraint_for (res, &lhsc);
4041
              if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY
4042
                  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY
4043
                  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY
4044
                  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY_CHK
4045
                  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY_CHK
4046
                  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY_CHK)
4047
                get_constraint_for_ptr_offset (dest, NULL_TREE, &rhsc);
4048
              else
4049
                get_constraint_for (dest, &rhsc);
4050
              process_all_all_constraints (lhsc, rhsc);
4051
              VEC_free (ce_s, heap, lhsc);
4052
              VEC_free (ce_s, heap, rhsc);
4053
            }
4054
          get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4055
          get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
4056
          do_deref (&lhsc);
4057
          do_deref (&rhsc);
4058
          process_all_all_constraints (lhsc, rhsc);
4059
          VEC_free (ce_s, heap, lhsc);
4060
          VEC_free (ce_s, heap, rhsc);
4061
          return true;
4062
        }
4063
      case BUILT_IN_MEMSET:
4064
      case BUILT_IN_MEMSET_CHK:
4065
      case BUILT_IN_TM_MEMSET:
4066
        {
4067
          tree res = gimple_call_lhs (t);
4068
          tree dest = gimple_call_arg (t, 0);
4069
          unsigned i;
4070
          ce_s *lhsp;
4071
          struct constraint_expr ac;
4072
          if (res != NULL_TREE)
4073
            {
4074
              get_constraint_for (res, &lhsc);
4075
              get_constraint_for (dest, &rhsc);
4076
              process_all_all_constraints (lhsc, rhsc);
4077
              VEC_free (ce_s, heap, lhsc);
4078
              VEC_free (ce_s, heap, rhsc);
4079
            }
4080
          get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4081
          do_deref (&lhsc);
4082
          if (flag_delete_null_pointer_checks
4083
              && integer_zerop (gimple_call_arg (t, 1)))
4084
            {
4085
              ac.type = ADDRESSOF;
4086
              ac.var = nothing_id;
4087
            }
4088
          else
4089
            {
4090
              ac.type = SCALAR;
4091
              ac.var = integer_id;
4092
            }
4093
          ac.offset = 0;
4094
          FOR_EACH_VEC_ELT (ce_s, lhsc, i, lhsp)
4095
              process_constraint (new_constraint (*lhsp, ac));
4096
          VEC_free (ce_s, heap, lhsc);
4097
          return true;
4098
        }
4099
      case BUILT_IN_ASSUME_ALIGNED:
4100
        {
4101
          tree res = gimple_call_lhs (t);
4102
          tree dest = gimple_call_arg (t, 0);
4103
          if (res != NULL_TREE)
4104
            {
4105
              get_constraint_for (res, &lhsc);
4106
              get_constraint_for (dest, &rhsc);
4107
              process_all_all_constraints (lhsc, rhsc);
4108
              VEC_free (ce_s, heap, lhsc);
4109
              VEC_free (ce_s, heap, rhsc);
4110
            }
4111
          return true;
4112
        }
4113
      /* All the following functions do not return pointers, do not
4114
         modify the points-to sets of memory reachable from their
4115
         arguments and do not add to the ESCAPED solution.  */
4116
      case BUILT_IN_SINCOS:
4117
      case BUILT_IN_SINCOSF:
4118
      case BUILT_IN_SINCOSL:
4119
      case BUILT_IN_FREXP:
4120
      case BUILT_IN_FREXPF:
4121
      case BUILT_IN_FREXPL:
4122
      case BUILT_IN_GAMMA_R:
4123
      case BUILT_IN_GAMMAF_R:
4124
      case BUILT_IN_GAMMAL_R:
4125
      case BUILT_IN_LGAMMA_R:
4126
      case BUILT_IN_LGAMMAF_R:
4127
      case BUILT_IN_LGAMMAL_R:
4128
      case BUILT_IN_MODF:
4129
      case BUILT_IN_MODFF:
4130
      case BUILT_IN_MODFL:
4131
      case BUILT_IN_REMQUO:
4132
      case BUILT_IN_REMQUOF:
4133
      case BUILT_IN_REMQUOL:
4134
      case BUILT_IN_FREE:
4135
        return true;
4136
      case BUILT_IN_STRDUP:
4137
      case BUILT_IN_STRNDUP:
4138
        if (gimple_call_lhs (t))
4139
          {
4140
            handle_lhs_call (t, gimple_call_lhs (t), gimple_call_flags (t),
4141
                             NULL, fndecl);
4142
            get_constraint_for_ptr_offset (gimple_call_lhs (t),
4143
                                           NULL_TREE, &lhsc);
4144
            get_constraint_for_ptr_offset (gimple_call_arg (t, 0),
4145
                                           NULL_TREE, &rhsc);
4146
            do_deref (&lhsc);
4147
            do_deref (&rhsc);
4148
            process_all_all_constraints (lhsc, rhsc);
4149
            VEC_free (ce_s, heap, lhsc);
4150
            VEC_free (ce_s, heap, rhsc);
4151
            return true;
4152
          }
4153
        break;
4154
      /* Trampolines are special - they set up passing the static
4155
         frame.  */
4156
      case BUILT_IN_INIT_TRAMPOLINE:
4157
        {
4158
          tree tramp = gimple_call_arg (t, 0);
4159
          tree nfunc = gimple_call_arg (t, 1);
4160
          tree frame = gimple_call_arg (t, 2);
4161
          unsigned i;
4162
          struct constraint_expr lhs, *rhsp;
4163
          if (in_ipa_mode)
4164
            {
4165
              varinfo_t nfi = NULL;
4166
              gcc_assert (TREE_CODE (nfunc) == ADDR_EXPR);
4167
              nfi = lookup_vi_for_tree (TREE_OPERAND (nfunc, 0));
4168
              if (nfi)
4169
                {
4170
                  lhs = get_function_part_constraint (nfi, fi_static_chain);
4171
                  get_constraint_for (frame, &rhsc);
4172
                  FOR_EACH_VEC_ELT (ce_s, rhsc, i, rhsp)
4173
                      process_constraint (new_constraint (lhs, *rhsp));
4174
                  VEC_free (ce_s, heap, rhsc);
4175
 
4176
                  /* Make the frame point to the function for
4177
                     the trampoline adjustment call.  */
4178
                  get_constraint_for (tramp, &lhsc);
4179
                  do_deref (&lhsc);
4180
                  get_constraint_for (nfunc, &rhsc);
4181
                  process_all_all_constraints (lhsc, rhsc);
4182
                  VEC_free (ce_s, heap, rhsc);
4183
                  VEC_free (ce_s, heap, lhsc);
4184
 
4185
                  return true;
4186
                }
4187
            }
4188
          /* Else fallthru to generic handling which will let
4189
             the frame escape.  */
4190
          break;
4191
        }
4192
      case BUILT_IN_ADJUST_TRAMPOLINE:
4193
        {
4194
          tree tramp = gimple_call_arg (t, 0);
4195
          tree res = gimple_call_lhs (t);
4196
          if (in_ipa_mode && res)
4197
            {
4198
              get_constraint_for (res, &lhsc);
4199
              get_constraint_for (tramp, &rhsc);
4200
              do_deref (&rhsc);
4201
              process_all_all_constraints (lhsc, rhsc);
4202
              VEC_free (ce_s, heap, rhsc);
4203
              VEC_free (ce_s, heap, lhsc);
4204
            }
4205
          return true;
4206
        }
4207
      CASE_BUILT_IN_TM_STORE (1):
4208
      CASE_BUILT_IN_TM_STORE (2):
4209
      CASE_BUILT_IN_TM_STORE (4):
4210
      CASE_BUILT_IN_TM_STORE (8):
4211
      CASE_BUILT_IN_TM_STORE (FLOAT):
4212
      CASE_BUILT_IN_TM_STORE (DOUBLE):
4213
      CASE_BUILT_IN_TM_STORE (LDOUBLE):
4214
      CASE_BUILT_IN_TM_STORE (M64):
4215
      CASE_BUILT_IN_TM_STORE (M128):
4216
      CASE_BUILT_IN_TM_STORE (M256):
4217
        {
4218
          tree addr = gimple_call_arg (t, 0);
4219
          tree src = gimple_call_arg (t, 1);
4220
 
4221
          get_constraint_for (addr, &lhsc);
4222
          do_deref (&lhsc);
4223
          get_constraint_for (src, &rhsc);
4224
          process_all_all_constraints (lhsc, rhsc);
4225
          VEC_free (ce_s, heap, lhsc);
4226
          VEC_free (ce_s, heap, rhsc);
4227
          return true;
4228
        }
4229
      CASE_BUILT_IN_TM_LOAD (1):
4230
      CASE_BUILT_IN_TM_LOAD (2):
4231
      CASE_BUILT_IN_TM_LOAD (4):
4232
      CASE_BUILT_IN_TM_LOAD (8):
4233
      CASE_BUILT_IN_TM_LOAD (FLOAT):
4234
      CASE_BUILT_IN_TM_LOAD (DOUBLE):
4235
      CASE_BUILT_IN_TM_LOAD (LDOUBLE):
4236
      CASE_BUILT_IN_TM_LOAD (M64):
4237
      CASE_BUILT_IN_TM_LOAD (M128):
4238
      CASE_BUILT_IN_TM_LOAD (M256):
4239
        {
4240
          tree dest = gimple_call_lhs (t);
4241
          tree addr = gimple_call_arg (t, 0);
4242
 
4243
          get_constraint_for (dest, &lhsc);
4244
          get_constraint_for (addr, &rhsc);
4245
          do_deref (&rhsc);
4246
          process_all_all_constraints (lhsc, rhsc);
4247
          VEC_free (ce_s, heap, lhsc);
4248
          VEC_free (ce_s, heap, rhsc);
4249
          return true;
4250
        }
4251
      /* Variadic arguments need to be handled in IPA
4252
         mode as well.  */
4253
      case BUILT_IN_VA_START:
4254
        {
4255
          tree valist = gimple_call_arg (t, 0);
4256
          struct constraint_expr rhs, *lhsp;
4257
          unsigned i;
4258
          get_constraint_for (valist, &lhsc);
4259
          do_deref (&lhsc);
4260
          /* The va_list gets access to pointers in variadic
4261
             arguments, which we know in the case of IPA analysis
4262
             and otherwise are just all nonlocal variables.  */
4263
          if (in_ipa_mode)
4264
            {
4265
              fi = lookup_vi_for_tree (cfun->decl);
4266
              rhs = get_function_part_constraint (fi, ~0);
4267
              rhs.type = ADDRESSOF;
4268
            }
4269
          else
4270
            {
4271
              rhs.var = nonlocal_id;
4272
              rhs.type = ADDRESSOF;
4273
              rhs.offset = 0;
4274
            }
4275
          FOR_EACH_VEC_ELT (ce_s, lhsc, i, lhsp)
4276
            process_constraint (new_constraint (*lhsp, rhs));
4277
          VEC_free (ce_s, heap, lhsc);
4278
          /* va_list is clobbered.  */
4279
          make_constraint_to (get_call_clobber_vi (t)->id, valist);
4280
          return true;
4281
        }
4282
      /* va_end doesn't have any effect that matters.  */
4283
      case BUILT_IN_VA_END:
4284
        return true;
4285
      /* Alternate return.  Simply give up for now.  */
4286
      case BUILT_IN_RETURN:
4287
        {
4288
          fi = NULL;
4289
          if (!in_ipa_mode
4290
              || !(fi = get_vi_for_tree (cfun->decl)))
4291
            make_constraint_from (get_varinfo (escaped_id), anything_id);
4292
          else if (in_ipa_mode
4293
                   && fi != NULL)
4294
            {
4295
              struct constraint_expr lhs, rhs;
4296
              lhs = get_function_part_constraint (fi, fi_result);
4297
              rhs.var = anything_id;
4298
              rhs.offset = 0;
4299
              rhs.type = SCALAR;
4300
              process_constraint (new_constraint (lhs, rhs));
4301
            }
4302
          return true;
4303
        }
4304
      /* printf-style functions may have hooks to set pointers to
4305
         point to somewhere into the generated string.  Leave them
4306
         for a later exercise...  */
4307
      default:
4308
        /* Fallthru to general call handling.  */;
4309
      }
4310
 
4311
  return false;
4312
}
4313
 
4314
/* Create constraints for the call T.  */
4315
 
4316
static void
4317
find_func_aliases_for_call (gimple t)
4318
{
4319
  tree fndecl = gimple_call_fndecl (t);
4320
  VEC(ce_s, heap) *lhsc = NULL;
4321
  VEC(ce_s, heap) *rhsc = NULL;
4322
  varinfo_t fi;
4323
 
4324
  if (fndecl != NULL_TREE
4325
      && DECL_BUILT_IN (fndecl)
4326
      && find_func_aliases_for_builtin_call (t))
4327
    return;
4328
 
4329
  fi = get_fi_for_callee (t);
4330
  if (!in_ipa_mode
4331
      || (fndecl && !fi->is_fn_info))
4332
    {
4333
      VEC(ce_s, heap) *rhsc = NULL;
4334
      int flags = gimple_call_flags (t);
4335
 
4336
      /* Const functions can return their arguments and addresses
4337
         of global memory but not of escaped memory.  */
4338
      if (flags & (ECF_CONST|ECF_NOVOPS))
4339
        {
4340
          if (gimple_call_lhs (t))
4341
            handle_const_call (t, &rhsc);
4342
        }
4343
      /* Pure functions can return addresses in and of memory
4344
         reachable from their arguments, but they are not an escape
4345
         point for reachable memory of their arguments.  */
4346
      else if (flags & (ECF_PURE|ECF_LOOPING_CONST_OR_PURE))
4347
        handle_pure_call (t, &rhsc);
4348
      else
4349
        handle_rhs_call (t, &rhsc);
4350
      if (gimple_call_lhs (t))
4351
        handle_lhs_call (t, gimple_call_lhs (t), flags, rhsc, fndecl);
4352
      VEC_free (ce_s, heap, rhsc);
4353
    }
4354
  else
4355
    {
4356
      tree lhsop;
4357
      unsigned j;
4358
 
4359
      /* Assign all the passed arguments to the appropriate incoming
4360
         parameters of the function.  */
4361
      for (j = 0; j < gimple_call_num_args (t); j++)
4362
        {
4363
          struct constraint_expr lhs ;
4364
          struct constraint_expr *rhsp;
4365
          tree arg = gimple_call_arg (t, j);
4366
 
4367
          get_constraint_for_rhs (arg, &rhsc);
4368
          lhs = get_function_part_constraint (fi, fi_parm_base + j);
4369
          while (VEC_length (ce_s, rhsc) != 0)
4370
            {
4371
              rhsp = VEC_last (ce_s, rhsc);
4372
              process_constraint (new_constraint (lhs, *rhsp));
4373
              VEC_pop (ce_s, rhsc);
4374
            }
4375
        }
4376
 
4377
      /* If we are returning a value, assign it to the result.  */
4378
      lhsop = gimple_call_lhs (t);
4379
      if (lhsop)
4380
        {
4381
          struct constraint_expr rhs;
4382
          struct constraint_expr *lhsp;
4383
 
4384
          get_constraint_for (lhsop, &lhsc);
4385
          rhs = get_function_part_constraint (fi, fi_result);
4386
          if (fndecl
4387
              && DECL_RESULT (fndecl)
4388
              && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
4389
            {
4390
              VEC(ce_s, heap) *tem = NULL;
4391
              VEC_safe_push (ce_s, heap, tem, &rhs);
4392
              do_deref (&tem);
4393
              rhs = *VEC_index (ce_s, tem, 0);
4394
              VEC_free(ce_s, heap, tem);
4395
            }
4396
          FOR_EACH_VEC_ELT (ce_s, lhsc, j, lhsp)
4397
            process_constraint (new_constraint (*lhsp, rhs));
4398
        }
4399
 
4400
      /* If we pass the result decl by reference, honor that.  */
4401
      if (lhsop
4402
          && fndecl
4403
          && DECL_RESULT (fndecl)
4404
          && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
4405
        {
4406
          struct constraint_expr lhs;
4407
          struct constraint_expr *rhsp;
4408
 
4409
          get_constraint_for_address_of (lhsop, &rhsc);
4410
          lhs = get_function_part_constraint (fi, fi_result);
4411
          FOR_EACH_VEC_ELT (ce_s, rhsc, j, rhsp)
4412
            process_constraint (new_constraint (lhs, *rhsp));
4413
          VEC_free (ce_s, heap, rhsc);
4414
        }
4415
 
4416
      /* If we use a static chain, pass it along.  */
4417
      if (gimple_call_chain (t))
4418
        {
4419
          struct constraint_expr lhs;
4420
          struct constraint_expr *rhsp;
4421
 
4422
          get_constraint_for (gimple_call_chain (t), &rhsc);
4423
          lhs = get_function_part_constraint (fi, fi_static_chain);
4424
          FOR_EACH_VEC_ELT (ce_s, rhsc, j, rhsp)
4425
            process_constraint (new_constraint (lhs, *rhsp));
4426
        }
4427
    }
4428
}
4429
 
4430
/* Walk statement T setting up aliasing constraints according to the
4431
   references found in T.  This function is the main part of the
4432
   constraint builder.  */
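/* Editorial note (not in the original source): the cases handled below
   are PHI nodes (the result receives all arguments), calls (via
   find_func_aliases_for_call), assignments (including structure copies
   and pointer arithmetic), returns (escape, or fi_result in IPA mode)
   and asms (conservative escape constraints).  */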
4434
 
4435
static void
4436
find_func_aliases (gimple origt)
4437
{
4438
  gimple t = origt;
4439
  VEC(ce_s, heap) *lhsc = NULL;
4440
  VEC(ce_s, heap) *rhsc = NULL;
4441
  struct constraint_expr *c;
4442
  varinfo_t fi;
4443
 
4444
  /* Now build constraints expressions.  */
4445
  if (gimple_code (t) == GIMPLE_PHI)
4446
    {
4447
      size_t i;
4448
      unsigned int j;
4449
 
4450
      /* For a phi node, assign all the arguments to
4451
         the result.  */
4452
      get_constraint_for (gimple_phi_result (t), &lhsc);
4453
      for (i = 0; i < gimple_phi_num_args (t); i++)
4454
        {
4455
          tree strippedrhs = PHI_ARG_DEF (t, i);
4456
 
4457
          STRIP_NOPS (strippedrhs);
4458
          get_constraint_for_rhs (gimple_phi_arg_def (t, i), &rhsc);
4459
 
4460
          FOR_EACH_VEC_ELT (ce_s, lhsc, j, c)
4461
            {
4462
              struct constraint_expr *c2;
4463
              while (VEC_length (ce_s, rhsc) > 0)
4464
                {
4465
                  c2 = VEC_last (ce_s, rhsc);
4466
                  process_constraint (new_constraint (*c, *c2));
4467
                  VEC_pop (ce_s, rhsc);
4468
                }
4469
            }
4470
        }
4471
    }
4472
  /* In IPA mode, we need to generate constraints to pass call
4473
     arguments through their calls.   There are two cases,
4474
     either a GIMPLE_CALL returning a value, or just a plain
4475
     GIMPLE_CALL when it does not.
4476
 
4477
     In non-ipa mode, we need to generate constraints for each
4478
     pointer passed by address.  */
4479
  else if (is_gimple_call (t))
4480
    find_func_aliases_for_call (t);
4481
 
4482
  /* Otherwise, just a regular assignment statement.  Only care about
4483
     operations with pointer result, others are dealt with as escape
4484
     points if they have pointer operands.  */
4485
  else if (is_gimple_assign (t))
4486
    {
4487
      /* Otherwise, just a regular assignment statement.  */
4488
      tree lhsop = gimple_assign_lhs (t);
4489
      tree rhsop = (gimple_num_ops (t) == 2) ? gimple_assign_rhs1 (t) : NULL;
4490
 
4491
      if (rhsop && TREE_CLOBBER_P (rhsop))
4492
        /* Ignore clobbers, they don't actually store anything into
4493
           the LHS.  */
4494
        ;
4495
      else if (rhsop && AGGREGATE_TYPE_P (TREE_TYPE (lhsop)))
4496
        do_structure_copy (lhsop, rhsop);
4497
      else
4498
        {
4499
          enum tree_code code = gimple_assign_rhs_code (t);
4500
 
4501
          get_constraint_for (lhsop, &lhsc);
4502
 
4503
          if (code == POINTER_PLUS_EXPR)
4504
            get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
4505
                                           gimple_assign_rhs2 (t), &rhsc);
4506
          else if (code == BIT_AND_EXPR
4507
                   && TREE_CODE (gimple_assign_rhs2 (t)) == INTEGER_CST)
4508
            {
4509
              /* Aligning a pointer via a BIT_AND_EXPR is offsetting
4510
                 the pointer.  Handle it by offsetting it by UNKNOWN.  */
4511
              get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
4512
                                             NULL_TREE, &rhsc);
4513
            }
4514
          else if ((CONVERT_EXPR_CODE_P (code)
4515
                    && !(POINTER_TYPE_P (gimple_expr_type (t))
4516
                         && !POINTER_TYPE_P (TREE_TYPE (rhsop))))
4517
                   || gimple_assign_single_p (t))
4518
            get_constraint_for_rhs (rhsop, &rhsc);
4519
          else if (truth_value_p (code))
4520
            /* Truth value results are not pointer (parts).  Or at least
4521
               very very unreasonable obfuscation of a part.  */
4522
            ;
4523
          else
4524
            {
4525
              /* All other operations are merges.  */
4526
              VEC (ce_s, heap) *tmp = NULL;
4527
              struct constraint_expr *rhsp;
4528
              unsigned i, j;
4529
              get_constraint_for_rhs (gimple_assign_rhs1 (t), &rhsc);
4530
              for (i = 2; i < gimple_num_ops (t); ++i)
4531
                {
4532
                  get_constraint_for_rhs (gimple_op (t, i), &tmp);
4533
                  FOR_EACH_VEC_ELT (ce_s, tmp, j, rhsp)
4534
                    VEC_safe_push (ce_s, heap, rhsc, rhsp);
4535
                  VEC_truncate (ce_s, tmp, 0);
4536
                }
4537
              VEC_free (ce_s, heap, tmp);
4538
            }
4539
          process_all_all_constraints (lhsc, rhsc);
4540
        }
4541
      /* If there is a store to a global variable the rhs escapes.  */
4542
      if ((lhsop = get_base_address (lhsop)) != NULL_TREE
4543
          && DECL_P (lhsop)
4544
          && is_global_var (lhsop)
4545
          && (!in_ipa_mode
4546
              || DECL_EXTERNAL (lhsop) || TREE_PUBLIC (lhsop)))
4547
        make_escape_constraint (rhsop);
4548
    }
4549
  /* Handle escapes through return.  */
4550
  else if (gimple_code (t) == GIMPLE_RETURN
4551
           && gimple_return_retval (t) != NULL_TREE)
4552
    {
4553
      fi = NULL;
4554
      if (!in_ipa_mode
4555
          || !(fi = get_vi_for_tree (cfun->decl)))
4556
        make_escape_constraint (gimple_return_retval (t));
4557
      else if (in_ipa_mode
4558
               && fi != NULL)
4559
        {
4560
          struct constraint_expr lhs ;
4561
          struct constraint_expr *rhsp;
4562
          unsigned i;
4563
 
4564
          lhs = get_function_part_constraint (fi, fi_result);
4565
          get_constraint_for_rhs (gimple_return_retval (t), &rhsc);
4566
          FOR_EACH_VEC_ELT (ce_s, rhsc, i, rhsp)
4567
            process_constraint (new_constraint (lhs, *rhsp));
4568
        }
4569
    }
4570
  /* Handle asms conservatively by adding escape constraints to everything.  */
4571
  else if (gimple_code (t) == GIMPLE_ASM)
4572
    {
4573
      unsigned i, noutputs;
4574
      const char **oconstraints;
4575
      const char *constraint;
4576
      bool allows_mem, allows_reg, is_inout;
4577
 
4578
      noutputs = gimple_asm_noutputs (t);
4579
      oconstraints = XALLOCAVEC (const char *, noutputs);
4580
 
4581
      for (i = 0; i < noutputs; ++i)
4582
        {
4583
          tree link = gimple_asm_output_op (t, i);
4584
          tree op = TREE_VALUE (link);
4585
 
4586
          constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4587
          oconstraints[i] = constraint;
4588
          parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
4589
                                   &allows_reg, &is_inout);
4590
 
4591
          /* A memory constraint makes the address of the operand escape.  */
4592
          if (!allows_reg && allows_mem)
4593
            make_escape_constraint (build_fold_addr_expr (op));
4594
 
4595
          /* The asm may read global memory, so outputs may point to
4596
             any global memory.  */
4597
          if (op)
4598
            {
4599
              VEC(ce_s, heap) *lhsc = NULL;
4600
              struct constraint_expr rhsc, *lhsp;
4601
              unsigned j;
4602
              get_constraint_for (op, &lhsc);
4603
              rhsc.var = nonlocal_id;
4604
              rhsc.offset = 0;
4605
              rhsc.type = SCALAR;
4606
              FOR_EACH_VEC_ELT (ce_s, lhsc, j, lhsp)
4607
                process_constraint (new_constraint (*lhsp, rhsc));
4608
              VEC_free (ce_s, heap, lhsc);
4609
            }
4610
        }
4611
      for (i = 0; i < gimple_asm_ninputs (t); ++i)
4612
        {
4613
          tree link = gimple_asm_input_op (t, i);
4614
          tree op = TREE_VALUE (link);
4615
 
4616
          constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4617
 
4618
          parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
4619
                                  &allows_mem, &allows_reg);
4620
 
4621
          /* A memory constraint makes the address of the operand escape.  */
4622
          if (!allows_reg && allows_mem)
4623
            make_escape_constraint (build_fold_addr_expr (op));
4624
          /* Strictly we'd only need the constraint to ESCAPED if
4625
             the asm clobbers memory, otherwise using something
4626
             along the lines of per-call clobbers/uses would be enough.  */
4627
          else if (op)
4628
            make_escape_constraint (op);
4629
        }
4630
    }
4631
 
4632
  VEC_free (ce_s, heap, rhsc);
4633
  VEC_free (ce_s, heap, lhsc);
4634
}
4635
 
4636
 
4637
/* Create a constraint adding to the clobber set of FI the memory
4638
   pointed to by PTR.  */
4639
 
4640
static void
4641
process_ipa_clobber (varinfo_t fi, tree ptr)
4642
{
4643
  VEC(ce_s, heap) *ptrc = NULL;
4644
  struct constraint_expr *c, lhs;
4645
  unsigned i;
4646
  get_constraint_for_rhs (ptr, &ptrc);
4647
  lhs = get_function_part_constraint (fi, fi_clobbers);
4648
  FOR_EACH_VEC_ELT (ce_s, ptrc, i, c)
4649
    process_constraint (new_constraint (lhs, *c));
4650
  VEC_free (ce_s, heap, ptrc);
4651
}
4652
 
4653
/* Walk statement T setting up clobber and use constraints according to the
4654
   references found in T.  This function is a main part of the
4655
   IPA constraint builder.  */
4656
 
4657
static void
4658
find_func_clobbers (gimple origt)
4659
{
4660
  gimple t = origt;
4661
  VEC(ce_s, heap) *lhsc = NULL;
4662
  VEC(ce_s, heap) *rhsc = NULL;
4663
  varinfo_t fi;
4664
 
4665
  /* Add constraints for clobbered/used in IPA mode.
4666
     We are not interested in what automatic variables are clobbered
4667
     or used, as this information is only used in the caller, to which
4668
     they do not escape.  */
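  /* Editorial note (not in the original source): only stores and loads
     that may reach memory visible to callers (non-automatic decls or
     memory accessed through pointers) are added to the fi_clobbers and
     fi_uses solutions below.  */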
4669
  gcc_assert (in_ipa_mode);
4670
 
4671
  /* If the stmt refers to memory in any way it had better have a VUSE.  */
4672
  if (gimple_vuse (t) == NULL_TREE)
4673
    return;
4674
 
4675
  /* We'd better have function information for the current function.  */
4676
  fi = lookup_vi_for_tree (cfun->decl);
4677
  gcc_assert (fi != NULL);
4678
 
4679
  /* Account for stores in assignments and calls.  */
4680
  if (gimple_vdef (t) != NULL_TREE
4681
      && gimple_has_lhs (t))
4682
    {
4683
      tree lhs = gimple_get_lhs (t);
4684
      tree tem = lhs;
4685
      while (handled_component_p (tem))
4686
        tem = TREE_OPERAND (tem, 0);
4687
      if ((DECL_P (tem)
4688
           && !auto_var_in_fn_p (tem, cfun->decl))
4689
          || INDIRECT_REF_P (tem)
4690
          || (TREE_CODE (tem) == MEM_REF
4691
              && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
4692
                   && auto_var_in_fn_p
4693
                        (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), cfun->decl))))
4694
        {
4695
          struct constraint_expr lhsc, *rhsp;
4696
          unsigned i;
4697
          lhsc = get_function_part_constraint (fi, fi_clobbers);
4698
          get_constraint_for_address_of (lhs, &rhsc);
4699
          FOR_EACH_VEC_ELT (ce_s, rhsc, i, rhsp)
4700
            process_constraint (new_constraint (lhsc, *rhsp));
4701
          VEC_free (ce_s, heap, rhsc);
4702
        }
4703
    }
4704
 
4705
  /* Account for uses in assignments and returns.  */
4706
  if (gimple_assign_single_p (t)
4707
      || (gimple_code (t) == GIMPLE_RETURN
4708
          && gimple_return_retval (t) != NULL_TREE))
4709
    {
4710
      tree rhs = (gimple_assign_single_p (t)
4711
                  ? gimple_assign_rhs1 (t) : gimple_return_retval (t));
4712
      tree tem = rhs;
4713
      while (handled_component_p (tem))
4714
        tem = TREE_OPERAND (tem, 0);
4715
      if ((DECL_P (tem)
4716
           && !auto_var_in_fn_p (tem, cfun->decl))
4717
          || INDIRECT_REF_P (tem)
4718
          || (TREE_CODE (tem) == MEM_REF
4719
              && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
4720
                   && auto_var_in_fn_p
4721
                        (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), cfun->decl))))
4722
        {
4723
          struct constraint_expr lhs, *rhsp;
4724
          unsigned i;
4725
          lhs = get_function_part_constraint (fi, fi_uses);
4726
          get_constraint_for_address_of (rhs, &rhsc);
4727
          FOR_EACH_VEC_ELT (ce_s, rhsc, i, rhsp)
4728
            process_constraint (new_constraint (lhs, *rhsp));
4729
          VEC_free (ce_s, heap, rhsc);
4730
        }
4731
    }
4732
 
4733
  if (is_gimple_call (t))
4734
    {
4735
      varinfo_t cfi = NULL;
4736
      tree decl = gimple_call_fndecl (t);
4737
      struct constraint_expr lhs, rhs;
4738
      unsigned i, j;
4739
 
4740
      /* For builtins we do not have separate function info.  For those
4741
         we do not generate escapes for, we have to generate clobbers/uses.  */
4742
      if (decl
4743
          && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
4744
        switch (DECL_FUNCTION_CODE (decl))
4745
          {
4746
          /* The following functions use and clobber memory pointed to
4747
             by their arguments.  */
4748
          case BUILT_IN_STRCPY:
4749
          case BUILT_IN_STRNCPY:
4750
          case BUILT_IN_BCOPY:
4751
          case BUILT_IN_MEMCPY:
4752
          case BUILT_IN_MEMMOVE:
4753
          case BUILT_IN_MEMPCPY:
4754
          case BUILT_IN_STPCPY:
4755
          case BUILT_IN_STPNCPY:
4756
          case BUILT_IN_STRCAT:
4757
          case BUILT_IN_STRNCAT:
4758
          case BUILT_IN_STRCPY_CHK:
4759
          case BUILT_IN_STRNCPY_CHK:
4760
          case BUILT_IN_MEMCPY_CHK:
4761
          case BUILT_IN_MEMMOVE_CHK:
4762
          case BUILT_IN_MEMPCPY_CHK:
4763
          case BUILT_IN_STPCPY_CHK:
4764
          case BUILT_IN_STPNCPY_CHK:
4765
          case BUILT_IN_STRCAT_CHK:
4766
          case BUILT_IN_STRNCAT_CHK:
4767
            {
4768
              tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
4769
                                               == BUILT_IN_BCOPY ? 1 : 0));
4770
              tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
4771
                                              == BUILT_IN_BCOPY ? 0 : 1));
4772
              unsigned i;
4773
              struct constraint_expr *rhsp, *lhsp;
4774
              get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4775
              lhs = get_function_part_constraint (fi, fi_clobbers);
4776
              FOR_EACH_VEC_ELT (ce_s, lhsc, i, lhsp)
4777
                process_constraint (new_constraint (lhs, *lhsp));
4778
              VEC_free (ce_s, heap, lhsc);
4779
              get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
4780
              lhs = get_function_part_constraint (fi, fi_uses);
4781
              FOR_EACH_VEC_ELT (ce_s, rhsc, i, rhsp)
4782
                process_constraint (new_constraint (lhs, *rhsp));
4783
              VEC_free (ce_s, heap, rhsc);
4784
              return;
4785
            }
4786
          /* The following functions clobber memory pointed to by
4787
             their first argument.  */
4788
          case BUILT_IN_MEMSET:
4789
          case BUILT_IN_MEMSET_CHK:
4790
            {
4791
              tree dest = gimple_call_arg (t, 0);
4792
              unsigned i;
4793
              ce_s *lhsp;
4794
              get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4795
              lhs = get_function_part_constraint (fi, fi_clobbers);
4796
              FOR_EACH_VEC_ELT (ce_s, lhsc, i, lhsp)
4797
                process_constraint (new_constraint (lhs, *lhsp));
4798
              VEC_free (ce_s, heap, lhsc);
4799
              return;
4800
            }
4801
          /* The following functions clobber their second and third
4802
             arguments.  */
4803
          case BUILT_IN_SINCOS:
4804
          case BUILT_IN_SINCOSF:
4805
          case BUILT_IN_SINCOSL:
4806
            {
4807
              process_ipa_clobber (fi, gimple_call_arg (t, 1));
4808
              process_ipa_clobber (fi, gimple_call_arg (t, 2));
4809
              return;
4810
            }
4811
          /* The following functions clobber their second argument.  */
4812
          case BUILT_IN_FREXP:
4813
          case BUILT_IN_FREXPF:
4814
          case BUILT_IN_FREXPL:
4815
          case BUILT_IN_LGAMMA_R:
4816
          case BUILT_IN_LGAMMAF_R:
4817
          case BUILT_IN_LGAMMAL_R:
4818
          case BUILT_IN_GAMMA_R:
4819
          case BUILT_IN_GAMMAF_R:
4820
          case BUILT_IN_GAMMAL_R:
4821
          case BUILT_IN_MODF:
4822
          case BUILT_IN_MODFF:
4823
          case BUILT_IN_MODFL:
4824
            {
4825
              process_ipa_clobber (fi, gimple_call_arg (t, 1));
4826
              return;
4827
            }
4828
          /* The following functions clobber their third argument.  */
4829
          case BUILT_IN_REMQUO:
4830
          case BUILT_IN_REMQUOF:
4831
          case BUILT_IN_REMQUOL:
4832
            {
4833
              process_ipa_clobber (fi, gimple_call_arg (t, 2));
4834
              return;
4835
            }
4836
          /* The following functions neither read nor clobber memory.  */
4837
          case BUILT_IN_ASSUME_ALIGNED:
4838
          case BUILT_IN_FREE:
4839
            return;
4840
          /* Trampolines are of no interest to us.  */
4841
          case BUILT_IN_INIT_TRAMPOLINE:
4842
          case BUILT_IN_ADJUST_TRAMPOLINE:
4843
            return;
4844
          case BUILT_IN_VA_START:
4845
          case BUILT_IN_VA_END:
4846
            return;
4847
          /* printf-style functions may have hooks to set pointers to
4848
             point to somewhere into the generated string.  Leave them
4849
             for a later exercise...  */
4850
          default:
4851
            /* Fallthru to general call handling.  */;
4852
          }
4853
 
4854
      /* Parameters passed by value are used.  */
4855
      lhs = get_function_part_constraint (fi, fi_uses);
4856
      for (i = 0; i < gimple_call_num_args (t); i++)
4857
        {
4858
          struct constraint_expr *rhsp;
4859
          tree arg = gimple_call_arg (t, i);
4860
 
4861
          if (TREE_CODE (arg) == SSA_NAME
4862
              || is_gimple_min_invariant (arg))
4863
            continue;
4864
 
4865
          get_constraint_for_address_of (arg, &rhsc);
4866
          FOR_EACH_VEC_ELT (ce_s, rhsc, j, rhsp)
4867
            process_constraint (new_constraint (lhs, *rhsp));
4868
          VEC_free (ce_s, heap, rhsc);
4869
        }
4870
 
4871
      /* Build constraints for propagating clobbers/uses along the
4872
         callgraph edges.  */
4873
      cfi = get_fi_for_callee (t);
4874
      if (cfi->id == anything_id)
4875
        {
4876
          if (gimple_vdef (t))
4877
            make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
4878
                                  anything_id);
4879
          make_constraint_from (first_vi_for_offset (fi, fi_uses),
4880
                                anything_id);
4881
          return;
4882
        }
4883
 
4884
      /* For callees without function info (that's external functions),
4885
         ESCAPED is clobbered and used.  */
4886
      if (gimple_call_fndecl (t)
4887
          && !cfi->is_fn_info)
4888
        {
4889
          varinfo_t vi;
4890
 
4891
          if (gimple_vdef (t))
4892
            make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
4893
                                  escaped_id);
4894
          make_copy_constraint (first_vi_for_offset (fi, fi_uses), escaped_id);
4895
 
4896
          /* Also honor the call statement use/clobber info.  */
4897
          if ((vi = lookup_call_clobber_vi (t)) != NULL)
4898
            make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
4899
                                  vi->id);
4900
          if ((vi = lookup_call_use_vi (t)) != NULL)
4901
            make_copy_constraint (first_vi_for_offset (fi, fi_uses),
4902
                                  vi->id);
4903
          return;
4904
        }
4905
 
4906
      /* Otherwise the caller clobbers and uses what the callee does.
4907
         ???  This should use a new complex constraint that filters
4908
         local variables of the callee.  */
4909
      if (gimple_vdef (t))
4910
        {
4911
          lhs = get_function_part_constraint (fi, fi_clobbers);
4912
          rhs = get_function_part_constraint (cfi, fi_clobbers);
4913
          process_constraint (new_constraint (lhs, rhs));
4914
        }
4915
      lhs = get_function_part_constraint (fi, fi_uses);
4916
      rhs = get_function_part_constraint (cfi, fi_uses);
4917
      process_constraint (new_constraint (lhs, rhs));
4918
    }
4919
  else if (gimple_code (t) == GIMPLE_ASM)
4920
    {
4921
      /* ???  Ick.  We can do better.  */
4922
      if (gimple_vdef (t))
4923
        make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
4924
                              anything_id);
4925
      make_constraint_from (first_vi_for_offset (fi, fi_uses),
4926
                            anything_id);
4927
    }
4928
 
4929
  VEC_free (ce_s, heap, rhsc);
4930
}
4931
 
4932
 
4933
/* Find the first varinfo in the same variable as START that overlaps with
4934
   OFFSET.  Return NULL if we can't find one.  */
4935
 
4936
static varinfo_t
4937
first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
4938
{
4939
  /* If the offset is outside of the variable, bail out.  */
4940
  if (offset >= start->fullsize)
4941
    return NULL;
4942
 
4943
  /* If we cannot reach offset from start, lookup the first field
4944
     and start from there.  */
4945
  if (start->offset > offset)
4946
    start = lookup_vi_for_tree (start->decl);
4947
 
4948
  while (start)
4949
    {
4950
      /* We may not find a variable in the field list with the actual
4951
         offset when we have glommed a structure to a variable.
4952
         In that case, however, offset should still be within the size
4953
         of the variable. */
4954
      if (offset >= start->offset
4955
          && (offset - start->offset) < start->size)
4956
        return start;
4957
 
4958
      start = start->next;
4959
    }
4960
 
4961
  return NULL;
4962
}
4963
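
/* As an illustration (made-up field layout): if START belongs to a
   variable whose fullsize is 128 bits with subfields at (offset 0,
   size 32), (32, 32) and (64, 64), then a lookup for offset 40 returns
   the subfield at offset 32, since 40 >= 32 and 40 - 32 < 32, while a
   lookup for offset 128 or beyond returns NULL because it lies outside
   the variable.  All offsets and sizes are in bits.  */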
 
4964
/* Find the first varinfo in the same variable as START that overlaps with
4965
   OFFSET.  If there is no such varinfo the varinfo directly preceding
4966
   OFFSET is returned.  */
4967
 
4968
static varinfo_t
4969
first_or_preceding_vi_for_offset (varinfo_t start,
4970
                                  unsigned HOST_WIDE_INT offset)
4971
{
4972
  /* If we cannot reach offset from start, lookup the first field
4973
     and start from there.  */
4974
  if (start->offset > offset)
4975
    start = lookup_vi_for_tree (start->decl);
4976
 
4977
  /* We may not find a variable in the field list with the actual
4978
     offset when we have glommed a structure to a variable.
4979
     In that case, however, offset should still be within the size
4980
     of the variable.
4981
     If we get beyond the offset we are looking for, return the field
4982
     directly preceding it, which may be the last field.  */
4983
  while (start->next
4984
         && offset >= start->offset
4985
         && !((offset - start->offset) < start->size))
4986
    start = start->next;
4987
 
4988
  return start;
4989
}
4990
 
4991
 
4992
/* This structure is used during pushing fields onto the fieldstack
4993
   to track the offset of the field, since bitpos_of_field gives it
4994
   relative to its immediate containing type, and we want it relative
4995
   to the ultimate containing object.  */
4996
 
4997
struct fieldoff
4998
{
4999
  /* Offset from the base of the ultimate containing object to this field.  */
5000
  HOST_WIDE_INT offset;
5001
 
5002
  /* Size, in bits, of the field.  */
5003
  unsigned HOST_WIDE_INT size;
5004
 
5005
  unsigned has_unknown_size : 1;
5006
 
5007
  unsigned must_have_pointers : 1;
5008
 
5009
  unsigned may_have_pointers : 1;
5010
 
5011
  unsigned only_restrict_pointers : 1;
5012
};
5013
typedef struct fieldoff fieldoff_s;
5014
 
5015
DEF_VEC_O(fieldoff_s);
5016
DEF_VEC_ALLOC_O(fieldoff_s,heap);
5017
 
5018
/* qsort comparison function for two fieldoff's PA and PB */
5019
 
5020
static int
5021
fieldoff_compare (const void *pa, const void *pb)
5022
{
5023
  const fieldoff_s *foa = (const fieldoff_s *)pa;
5024
  const fieldoff_s *fob = (const fieldoff_s *)pb;
5025
  unsigned HOST_WIDE_INT foasize, fobsize;
5026
 
5027
  if (foa->offset < fob->offset)
5028
    return -1;
5029
  else if (foa->offset > fob->offset)
5030
    return 1;
5031
 
5032
  foasize = foa->size;
5033
  fobsize = fob->size;
5034
  if (foasize < fobsize)
5035
    return -1;
5036
  else if (foasize > fobsize)
5037
    return 1;
5038
  return 0;
5039
}
5040
 
5041
/* Sort a fieldstack according to the field offset and sizes.  */
5042
static void
5043
sort_fieldstack (VEC(fieldoff_s,heap) *fieldstack)
5044
{
5045
  VEC_qsort (fieldoff_s, fieldstack, fieldoff_compare);
5046
}
5047
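
/* The following is a minimal sketch, compiled out on purpose, of the
   ordering that fieldoff_compare establishes; the function name and
   the offsets/sizes are made up for illustration only.  sort_fieldstack
   above is the real entry point for VEC-based field stacks.  */
#if 0
static void
example_fieldoff_ordering (void)
{
  fieldoff_s fields[3];

  memset (fields, 0, sizeof (fields));
  /* Three records in arbitrary order, (offset, size) in bits.  */
  fields[0].offset = 64; fields[0].size = 32;
  fields[1].offset = 0;  fields[1].size = 32;
  fields[2].offset = 32; fields[2].size = 8;

  /* fieldoff_compare orders by offset first and size second, so the
     array afterwards reads (0,32), (32,8), (64,32).  */
  qsort (fields, 3, sizeof (fieldoff_s), fieldoff_compare);
}
#endif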
 
5048
/* Return true if T is a type that can have subvars.  */
5049
 
5050
static inline bool
5051
type_can_have_subvars (const_tree t)
5052
{
5053
  /* Aggregates without overlapping fields can have subvars.  */
5054
  return TREE_CODE (t) == RECORD_TYPE;
5055
}
5056
 
5057
/* Return true if V is a tree that we can have subvars for.
5058
   Normally, this is any aggregate type.  Also complex
5059
   types which are not gimple registers can have subvars.  */
5060
 
5061
static inline bool
5062
var_can_have_subvars (const_tree v)
5063
{
5064
  /* Volatile variables should never have subvars.  */
5065
  if (TREE_THIS_VOLATILE (v))
5066
    return false;
5067
 
5068
  /* Non decls or memory tags can never have subvars.  */
5069
  if (!DECL_P (v))
5070
    return false;
5071
 
5072
  return type_can_have_subvars (TREE_TYPE (v));
5073
}
5074
 
5075
/* Return true if T is a type that must contain pointers.  */
5076
 
5077
static bool
5078
type_must_have_pointers (tree type)
5079
{
5080
  if (POINTER_TYPE_P (type))
5081
    return true;
5082
 
5083
  if (TREE_CODE (type) == ARRAY_TYPE)
5084
    return type_must_have_pointers (TREE_TYPE (type));
5085
 
5086
  /* A function or method can have pointers as arguments, so track
5087
     those separately.  */
5088
  if (TREE_CODE (type) == FUNCTION_TYPE
5089
      || TREE_CODE (type) == METHOD_TYPE)
5090
    return true;
5091
 
5092
  return false;
5093
}
5094
 
5095
static bool
5096
field_must_have_pointers (tree t)
5097
{
5098
  return type_must_have_pointers (TREE_TYPE (t));
5099
}
5100
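
/* As an illustration (types chosen arbitrarily): type_must_have_pointers
   is true for "int *", for "int *[4]" through the recursion on the
   array element type, and for any FUNCTION_TYPE or METHOD_TYPE; it is
   false for plain scalars such as "int".  A RECORD_TYPE is also false
   here; its members are instead examined one by one through
   field_must_have_pointers when its fields are pushed.  */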
 
5101
/* Given a TYPE, and a vector of field offsets FIELDSTACK, push all
5102
   the fields of TYPE onto fieldstack, recording their offsets along
5103
   the way.
5104
 
5105
   OFFSET is used to keep track of the offset in this entire
5106
   structure, rather than just the immediately containing structure.
5107
   Returns false if the caller is supposed to handle the field we
5108
   recursed for.  */
5109
 
5110
static bool
5111
push_fields_onto_fieldstack (tree type, VEC(fieldoff_s,heap) **fieldstack,
5112
                             HOST_WIDE_INT offset)
5113
{
5114
  tree field;
5115
  bool empty_p = true;
5116
 
5117
  if (TREE_CODE (type) != RECORD_TYPE)
5118
    return false;
5119
 
5120
  /* If the vector of fields is growing too big, bail out early.
5121
     Callers check for VEC_length <= MAX_FIELDS_FOR_FIELD_SENSITIVE, make
5122
     sure this fails.  */
5123
  if (VEC_length (fieldoff_s, *fieldstack) > MAX_FIELDS_FOR_FIELD_SENSITIVE)
5124
    return false;
5125
 
5126
  for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
5127
    if (TREE_CODE (field) == FIELD_DECL)
5128
      {
5129
        bool push = false;
5130
        HOST_WIDE_INT foff = bitpos_of_field (field);
5131
 
5132
        if (!var_can_have_subvars (field)
5133
            || TREE_CODE (TREE_TYPE (field)) == QUAL_UNION_TYPE
5134
            || TREE_CODE (TREE_TYPE (field)) == UNION_TYPE)
5135
          push = true;
5136
        else if (!push_fields_onto_fieldstack
5137
                    (TREE_TYPE (field), fieldstack, offset + foff)
5138
                 && (DECL_SIZE (field)
5139
                     && !integer_zerop (DECL_SIZE (field))))
5140
          /* Empty structures may have actual size, like in C++.  So
5141
             if we did not push any subfields and the size is
5142
             nonzero, push the field onto the stack.  */
5143
          push = true;
5144
 
5145
        if (push)
5146
          {
5147
            fieldoff_s *pair = NULL;
5148
            bool has_unknown_size = false;
5149
            bool must_have_pointers_p;
5150
 
5151
            if (!VEC_empty (fieldoff_s, *fieldstack))
5152
              pair = VEC_last (fieldoff_s, *fieldstack);
5153
 
5154
            /* If there isn't anything at offset zero, create something.  */
5155
            if (!pair
5156
                && offset + foff != 0)
5157
              {
5158
                pair = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
5159
                pair->offset = 0;
5160
                pair->size = offset + foff;
5161
                pair->has_unknown_size = false;
5162
                pair->must_have_pointers = false;
5163
                pair->may_have_pointers = false;
5164
                pair->only_restrict_pointers = false;
5165
              }
5166
 
5167
            if (!DECL_SIZE (field)
5168
                || !host_integerp (DECL_SIZE (field), 1))
5169
              has_unknown_size = true;
5170
 
5171
            /* If adjacent fields do not contain pointers merge them.  */
5172
            must_have_pointers_p = field_must_have_pointers (field);
5173
            if (pair
5174
                && !has_unknown_size
5175
                && !must_have_pointers_p
5176
                && !pair->must_have_pointers
5177
                && !pair->has_unknown_size
5178
                && pair->offset + (HOST_WIDE_INT)pair->size == offset + foff)
5179
              {
5180
                pair->size += TREE_INT_CST_LOW (DECL_SIZE (field));
5181
              }
5182
            else
5183
              {
5184
                pair = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
5185
                pair->offset = offset + foff;
5186
                pair->has_unknown_size = has_unknown_size;
5187
                if (!has_unknown_size)
5188
                  pair->size = TREE_INT_CST_LOW (DECL_SIZE (field));
5189
                else
5190
                  pair->size = -1;
5191
                pair->must_have_pointers = must_have_pointers_p;
5192
                pair->may_have_pointers = true;
5193
                pair->only_restrict_pointers
5194
                  = (!has_unknown_size
5195
                     && POINTER_TYPE_P (TREE_TYPE (field))
5196
                     && TYPE_RESTRICT (TREE_TYPE (field)));
5197
              }
5198
          }
5199
 
5200
        empty_p = false;
5201
      }
5202
 
5203
  return !empty_p;
5204
}
5205
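
/* A worked example of the flattening above, assuming 32-bit ints and
   pointers with no padding (the layout is illustrative only): for
   struct S { int a; int b; int *p; } the walk first pushes a as
   (offset 0, size 32); b is pointer-free and adjacent, so it is merged
   into that record, giving (0, 64); p must have pointers and therefore
   starts a new record (64, 32) with must_have_pointers set.  The field
   stack thus ends up with two entries instead of three.  */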
 
5206
/* Count the number of arguments DECL has, and set IS_VARARGS to true
5207
   if it is a varargs function.  */
5208
 
5209
static unsigned int
5210
count_num_arguments (tree decl, bool *is_varargs)
5211
{
5212
  unsigned int num = 0;
5213
  tree t;
5214
 
5215
  /* Capture named arguments for K&R functions.  They do not
5216
     have a prototype and thus no TYPE_ARG_TYPES.  */
5217
  for (t = DECL_ARGUMENTS (decl); t; t = DECL_CHAIN (t))
5218
    ++num;
5219
 
5220
  /* Check if the function has variadic arguments.  */
5221
  for (t = TYPE_ARG_TYPES (TREE_TYPE (decl)); t; t = TREE_CHAIN (t))
5222
    if (TREE_VALUE (t) == void_type_node)
5223
      break;
5224
  if (!t)
5225
    *is_varargs = true;
5226
 
5227
  return num;
5228
}
5229
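
/* For instance (a made-up declaration): for int f (int a, char *b, ...)
   the DECL_ARGUMENTS walk counts the two named parameters, and since
   the TYPE_ARG_TYPES list of a variadic prototype is not terminated by
   void_type_node the second loop runs off its end, so *IS_VARARGS is
   set and 2 is returned.  An unprototyped K&R definition has no
   TYPE_ARG_TYPES at all and is counted as varargs for the same
   reason.  */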
 
5230
/* Create function information for DECL, using NAME, and return the
5231
   varinfo we've created for the function.  */
5232
 
5233
static varinfo_t
5234
create_function_info_for (tree decl, const char *name)
5235
{
5236
  struct function *fn = DECL_STRUCT_FUNCTION (decl);
5237
  varinfo_t vi, prev_vi;
5238
  tree arg;
5239
  unsigned int i;
5240
  bool is_varargs = false;
5241
  unsigned int num_args = count_num_arguments (decl, &is_varargs);
5242
 
5243
  /* Create the variable info.  */
5244
 
5245
  vi = new_var_info (decl, name);
5246
  vi->offset = 0;
5247
  vi->size = 1;
5248
  vi->fullsize = fi_parm_base + num_args;
5249
  vi->is_fn_info = 1;
5250
  vi->may_have_pointers = false;
5251
  if (is_varargs)
5252
    vi->fullsize = ~0;
5253
  insert_vi_for_tree (vi->decl, vi);
5254
 
5255
  prev_vi = vi;
5256
 
5257
  /* Create a variable for things the function clobbers and one for
5258
     things the function uses.  */
5259
    {
5260
      varinfo_t clobbervi, usevi;
5261
      const char *newname;
5262
      char *tempname;
5263
 
5264
      asprintf (&tempname, "%s.clobber", name);
5265
      newname = ggc_strdup (tempname);
5266
      free (tempname);
5267
 
5268
      clobbervi = new_var_info (NULL, newname);
5269
      clobbervi->offset = fi_clobbers;
5270
      clobbervi->size = 1;
5271
      clobbervi->fullsize = vi->fullsize;
5272
      clobbervi->is_full_var = true;
5273
      clobbervi->is_global_var = false;
5274
      gcc_assert (prev_vi->offset < clobbervi->offset);
5275
      prev_vi->next = clobbervi;
5276
      prev_vi = clobbervi;
5277
 
5278
      asprintf (&tempname, "%s.use", name);
5279
      newname = ggc_strdup (tempname);
5280
      free (tempname);
5281
 
5282
      usevi = new_var_info (NULL, newname);
5283
      usevi->offset = fi_uses;
5284
      usevi->size = 1;
5285
      usevi->fullsize = vi->fullsize;
5286
      usevi->is_full_var = true;
5287
      usevi->is_global_var = false;
5288
      gcc_assert (prev_vi->offset < usevi->offset);
5289
      prev_vi->next = usevi;
5290
      prev_vi = usevi;
5291
    }
5292
 
5293
  /* And one for the static chain.  */
5294
  if (fn->static_chain_decl != NULL_TREE)
5295
    {
5296
      varinfo_t chainvi;
5297
      const char *newname;
5298
      char *tempname;
5299
 
5300
      asprintf (&tempname, "%s.chain", name);
5301
      newname = ggc_strdup (tempname);
5302
      free (tempname);
5303
 
5304
      chainvi = new_var_info (fn->static_chain_decl, newname);
5305
      chainvi->offset = fi_static_chain;
5306
      chainvi->size = 1;
5307
      chainvi->fullsize = vi->fullsize;
5308
      chainvi->is_full_var = true;
5309
      chainvi->is_global_var = false;
5310
      gcc_assert (prev_vi->offset < chainvi->offset);
5311
      prev_vi->next = chainvi;
5312
      prev_vi = chainvi;
5313
      insert_vi_for_tree (fn->static_chain_decl, chainvi);
5314
    }
5315
 
5316
  /* Create a variable for the return var.  */
5317
  if (DECL_RESULT (decl) != NULL
5318
      || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
5319
    {
5320
      varinfo_t resultvi;
5321
      const char *newname;
5322
      char *tempname;
5323
      tree resultdecl = decl;
5324
 
5325
      if (DECL_RESULT (decl))
5326
        resultdecl = DECL_RESULT (decl);
5327
 
5328
      asprintf (&tempname, "%s.result", name);
5329
      newname = ggc_strdup (tempname);
5330
      free (tempname);
5331
 
5332
      resultvi = new_var_info (resultdecl, newname);
5333
      resultvi->offset = fi_result;
5334
      resultvi->size = 1;
5335
      resultvi->fullsize = vi->fullsize;
5336
      resultvi->is_full_var = true;
5337
      if (DECL_RESULT (decl))
5338
        resultvi->may_have_pointers = true;
5339
      gcc_assert (prev_vi->offset < resultvi->offset);
5340
      prev_vi->next = resultvi;
5341
      prev_vi = resultvi;
5342
      if (DECL_RESULT (decl))
5343
        insert_vi_for_tree (DECL_RESULT (decl), resultvi);
5344
    }
5345
 
5346
  /* Set up variables for each argument.  */
5347
  arg = DECL_ARGUMENTS (decl);
5348
  for (i = 0; i < num_args; i++)
5349
    {
5350
      varinfo_t argvi;
5351
      const char *newname;
5352
      char *tempname;
5353
      tree argdecl = decl;
5354
 
5355
      if (arg)
5356
        argdecl = arg;
5357
 
5358
      asprintf (&tempname, "%s.arg%d", name, i);
5359
      newname = ggc_strdup (tempname);
5360
      free (tempname);
5361
 
5362
      argvi = new_var_info (argdecl, newname);
5363
      argvi->offset = fi_parm_base + i;
5364
      argvi->size = 1;
5365
      argvi->is_full_var = true;
5366
      argvi->fullsize = vi->fullsize;
5367
      if (arg)
5368
        argvi->may_have_pointers = true;
5369
      gcc_assert (prev_vi->offset < argvi->offset);
5370
      prev_vi->next = argvi;
5371
      prev_vi = argvi;
5372
      if (arg)
5373
        {
5374
          insert_vi_for_tree (arg, argvi);
5375
          arg = DECL_CHAIN (arg);
5376
        }
5377
    }
5378
 
5379
  /* Add one representative for all further args.  */
5380
  if (is_varargs)
5381
    {
5382
      varinfo_t argvi;
5383
      const char *newname;
5384
      char *tempname;
5385
      tree decl;
5386
 
5387
      asprintf (&tempname, "%s.varargs", name);
5388
      newname = ggc_strdup (tempname);
5389
      free (tempname);
5390
 
5391
      /* We need something that can be pointed to for va_start.  */
5392
      decl = build_fake_var_decl (ptr_type_node);
5393
 
5394
      argvi = new_var_info (decl, newname);
5395
      argvi->offset = fi_parm_base + num_args;
5396
      argvi->size = ~0;
5397
      argvi->is_full_var = true;
5398
      argvi->is_heap_var = true;
5399
      argvi->fullsize = vi->fullsize;
5400
      gcc_assert (prev_vi->offset < argvi->offset);
5401
      prev_vi->next = argvi;
5402
      prev_vi = argvi;
5403
    }
5404
 
5405
  return vi;
5406
}
5407
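
/* To illustrate the layout built above (the function is made up): for
   int foo (int *p, float q) with no static chain the varinfo chain is
   foo itself at offset 0, then foo.clobber at fi_clobbers, foo.use at
   fi_uses, foo.result at fi_result, and foo.arg0 / foo.arg1 at
   fi_parm_base and fi_parm_base + 1, giving fullsize fi_parm_base + 2.
   A variadic function additionally gets a foo.varargs entry past the
   last parameter and an unknown (~0) fullsize.  */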
 
5408
 
5409
/* Return true if FIELDSTACK contains fields that overlap.
5410
   FIELDSTACK is assumed to be sorted by offset.  */
5411
 
5412
static bool
5413
check_for_overlaps (VEC (fieldoff_s,heap) *fieldstack)
5414
{
5415
  fieldoff_s *fo = NULL;
5416
  unsigned int i;
5417
  HOST_WIDE_INT lastoffset = -1;
5418
 
5419
  FOR_EACH_VEC_ELT (fieldoff_s, fieldstack, i, fo)
5420
    {
5421
      if (fo->offset == lastoffset)
5422
        return true;
5423
      lastoffset = fo->offset;
5424
    }
5425
  return false;
5426
}
5427
 
5428
/* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
5429
   This will also create any varinfo structures necessary for fields
5430
   of DECL.  */
5431
 
5432
static varinfo_t
5433
create_variable_info_for_1 (tree decl, const char *name)
5434
{
5435
  varinfo_t vi, newvi;
5436
  tree decl_type = TREE_TYPE (decl);
5437
  tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decl_type);
5438
  VEC (fieldoff_s,heap) *fieldstack = NULL;
5439
  fieldoff_s *fo;
5440
  unsigned int i;
5441
 
5442
  if (!declsize
5443
      || !host_integerp (declsize, 1))
5444
    {
5445
      vi = new_var_info (decl, name);
5446
      vi->offset = 0;
5447
      vi->size = ~0;
5448
      vi->fullsize = ~0;
5449
      vi->is_unknown_size_var = true;
5450
      vi->is_full_var = true;
5451
      vi->may_have_pointers = true;
5452
      return vi;
5453
    }
5454
 
5455
  /* Collect field information.  */
5456
  if (use_field_sensitive
5457
      && var_can_have_subvars (decl)
5458
      /* ???  Force us to not use subfields for global initializers
5459
         in IPA mode.  Else we'd have to parse arbitrary initializers.  */
5460
      && !(in_ipa_mode
5461
           && is_global_var (decl)
5462
           && DECL_INITIAL (decl)))
5463
    {
5464
      fieldoff_s *fo = NULL;
5465
      bool notokay = false;
5466
      unsigned int i;
5467
 
5468
      push_fields_onto_fieldstack (decl_type, &fieldstack, 0);
5469
 
5470
      for (i = 0; !notokay && VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
5471
        if (fo->has_unknown_size
5472
            || fo->offset < 0)
5473
          {
5474
            notokay = true;
5475
            break;
5476
          }
5477
 
5478
      /* We can't sort them if we have a field with a variable sized type,
5479
         which will make notokay = true.  In that case, we are going to return
5480
         without creating varinfos for the fields anyway, so sorting them is a
5481
         waste to boot.  */
5482
      if (!notokay)
5483
        {
5484
          sort_fieldstack (fieldstack);
5485
          /* Due to some C++ FE issues, like PR 22488, we might end up
5486
             with what appear to be overlapping fields even though they,
5487
             in reality, do not overlap.  Until the C++ FE is fixed,
5488
             we will simply disable field-sensitivity for these cases.  */
5489
          notokay = check_for_overlaps (fieldstack);
5490
        }
5491
 
5492
      if (notokay)
5493
        VEC_free (fieldoff_s, heap, fieldstack);
5494
    }
5495
 
5496
  /* If we didn't end up collecting sub-variables create a full
5497
     variable for the decl.  */
5498
  if (VEC_length (fieldoff_s, fieldstack) <= 1
5499
      || VEC_length (fieldoff_s, fieldstack) > MAX_FIELDS_FOR_FIELD_SENSITIVE)
5500
    {
5501
      vi = new_var_info (decl, name);
5502
      vi->offset = 0;
5503
      vi->may_have_pointers = true;
5504
      vi->fullsize = TREE_INT_CST_LOW (declsize);
5505
      vi->size = vi->fullsize;
5506
      vi->is_full_var = true;
5507
      VEC_free (fieldoff_s, heap, fieldstack);
5508
      return vi;
5509
    }
5510
 
5511
  vi = new_var_info (decl, name);
5512
  vi->fullsize = TREE_INT_CST_LOW (declsize);
5513
  for (i = 0, newvi = vi;
5514
       VEC_iterate (fieldoff_s, fieldstack, i, fo);
5515
       ++i, newvi = newvi->next)
5516
    {
5517
      const char *newname = "NULL";
5518
      char *tempname;
5519
 
5520
      if (dump_file)
5521
        {
5522
          asprintf (&tempname, "%s." HOST_WIDE_INT_PRINT_DEC
5523
                    "+" HOST_WIDE_INT_PRINT_DEC, name, fo->offset, fo->size);
5524
          newname = ggc_strdup (tempname);
5525
          free (tempname);
5526
        }
5527
      newvi->name = newname;
5528
      newvi->offset = fo->offset;
5529
      newvi->size = fo->size;
5530
      newvi->fullsize = vi->fullsize;
5531
      newvi->may_have_pointers = fo->may_have_pointers;
5532
      newvi->only_restrict_pointers = fo->only_restrict_pointers;
5533
      if (i + 1 < VEC_length (fieldoff_s, fieldstack))
5534
        newvi->next = new_var_info (decl, name);
5535
    }
5536
 
5537
  VEC_free (fieldoff_s, heap, fieldstack);
5538
 
5539
  return vi;
5540
}
5541
 
5542
static unsigned int
5543
create_variable_info_for (tree decl, const char *name)
5544
{
5545
  varinfo_t vi = create_variable_info_for_1 (decl, name);
5546
  unsigned int id = vi->id;
5547
 
5548
  insert_vi_for_tree (decl, vi);
5549
 
5550
  if (TREE_CODE (decl) != VAR_DECL)
5551
    return id;
5552
 
5553
  /* Create initial constraints for globals.  */
5554
  for (; vi; vi = vi->next)
5555
    {
5556
      if (!vi->may_have_pointers
5557
          || !vi->is_global_var)
5558
        continue;
5559
 
5560
      /* Mark global restrict qualified pointers.  */
5561
      if ((POINTER_TYPE_P (TREE_TYPE (decl))
5562
           && TYPE_RESTRICT (TREE_TYPE (decl)))
5563
          || vi->only_restrict_pointers)
5564
        {
5565
          make_constraint_from_global_restrict (vi, "GLOBAL_RESTRICT");
5566
          continue;
5567
        }
5568
 
5569
      /* In non-IPA mode initializing from NONLOCAL is all we need.  */
5570
      if (!in_ipa_mode
5571
          || DECL_HARD_REGISTER (decl))
5572
        make_copy_constraint (vi, nonlocal_id);
5573
 
5574
      /* In IPA mode parse the initializer and generate proper constraints
5575
         for it.  */
5576
      else
5577
        {
5578
          struct varpool_node *vnode = varpool_get_node (decl);
5579
 
5580
          /* For escaped variables initialize them from nonlocal.  */
5581
          if (!varpool_all_refs_explicit_p (vnode))
5582
            make_copy_constraint (vi, nonlocal_id);
5583
 
5584
          /* If this is a global variable with an initializer and we are in
5585
             IPA mode generate constraints for it.  */
5586
          if (DECL_INITIAL (decl))
5587
            {
5588
              VEC (ce_s, heap) *rhsc = NULL;
5589
              struct constraint_expr lhs, *rhsp;
5590
              unsigned i;
5591
              get_constraint_for_rhs (DECL_INITIAL (decl), &rhsc);
5592
              lhs.var = vi->id;
5593
              lhs.offset = 0;
5594
              lhs.type = SCALAR;
5595
              FOR_EACH_VEC_ELT (ce_s, rhsc, i, rhsp)
5596
                process_constraint (new_constraint (lhs, *rhsp));
5597
              /* If this is a variable that escapes from the unit
5598
                 the initializer escapes as well.  */
5599
              if (!varpool_all_refs_explicit_p (vnode))
5600
                {
5601
                  lhs.var = escaped_id;
5602
                  lhs.offset = 0;
5603
                  lhs.type = SCALAR;
5604
                  FOR_EACH_VEC_ELT (ce_s, rhsc, i, rhsp)
5605
                    process_constraint (new_constraint (lhs, *rhsp));
5606
                }
5607
              VEC_free (ce_s, heap, rhsc);
5608
            }
5609
        }
5610
    }
5611
 
5612
  return id;
5613
}
5614
 
5615
/* Print out the points-to solution for VAR to FILE.  */
5616
 
5617
static void
5618
dump_solution_for_var (FILE *file, unsigned int var)
5619
{
5620
  varinfo_t vi = get_varinfo (var);
5621
  unsigned int i;
5622
  bitmap_iterator bi;
5623
 
5624
  /* Dump the solution for unified vars anyway; this avoids difficulties
5625
     in scanning dumps in the testsuite.  */
5626
  fprintf (file, "%s = { ", vi->name);
5627
  vi = get_varinfo (find (var));
5628
  EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
5629
    fprintf (file, "%s ", get_varinfo (i)->name);
5630
  fprintf (file, "}");
5631
 
5632
  /* But note when the variable was unified.  */
5633
  if (vi->id != var)
5634
    fprintf (file, " same as %s", vi->name);
5635
 
5636
  fprintf (file, "\n");
5637
}
5638
 
5639
/* Print the points-to solution for VAR to stdout.  */
5640
 
5641
DEBUG_FUNCTION void
5642
debug_solution_for_var (unsigned int var)
5643
{
5644
  dump_solution_for_var (stdout, var);
5645
}
5646
 
5647
/* Create varinfo structures for all of the variables in the
5648
   function for intraprocedural mode.  */
5649
 
5650
static void
5651
intra_create_variable_infos (void)
5652
{
5653
  tree t;
5654
 
5655
  /* For each incoming pointer argument arg, create the constraint ARG
5656
     = NONLOCAL or a dummy variable if it is a restrict qualified
5657
     passed-by-reference argument.  */
5658
  for (t = DECL_ARGUMENTS (current_function_decl); t; t = DECL_CHAIN (t))
5659
    {
5660
      varinfo_t p = get_vi_for_tree (t);
5661
 
5662
      /* For restrict qualified pointers to objects passed by
5663
         reference build a real representative for the pointed-to object.
5664
         Treat restrict qualified references the same.  */
5665
      if (TYPE_RESTRICT (TREE_TYPE (t))
5666
          && ((DECL_BY_REFERENCE (t) && POINTER_TYPE_P (TREE_TYPE (t)))
5667
              || TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE)
5668
          && !type_contains_placeholder_p (TREE_TYPE (TREE_TYPE (t))))
5669
        {
5670
          struct constraint_expr lhsc, rhsc;
5671
          varinfo_t vi;
5672
          tree heapvar = build_fake_var_decl (TREE_TYPE (TREE_TYPE (t)));
5673
          DECL_EXTERNAL (heapvar) = 1;
5674
          vi = create_variable_info_for_1 (heapvar, "PARM_NOALIAS");
5675
          insert_vi_for_tree (heapvar, vi);
5676
          lhsc.var = p->id;
5677
          lhsc.type = SCALAR;
5678
          lhsc.offset = 0;
5679
          rhsc.var = vi->id;
5680
          rhsc.type = ADDRESSOF;
5681
          rhsc.offset = 0;
5682
          process_constraint (new_constraint (lhsc, rhsc));
5683
          for (; vi; vi = vi->next)
5684
            if (vi->may_have_pointers)
5685
              {
5686
                if (vi->only_restrict_pointers)
5687
                  make_constraint_from_global_restrict (vi, "GLOBAL_RESTRICT");
5688
                else
5689
                  make_copy_constraint (vi, nonlocal_id);
5690
              }
5691
          continue;
5692
        }
5693
 
5694
      if (POINTER_TYPE_P (TREE_TYPE (t))
5695
          && TYPE_RESTRICT (TREE_TYPE (t)))
5696
        make_constraint_from_global_restrict (p, "PARM_RESTRICT");
5697
      else
5698
        {
5699
          for (; p; p = p->next)
5700
            {
5701
              if (p->only_restrict_pointers)
5702
                make_constraint_from_global_restrict (p, "PARM_RESTRICT");
5703
              else if (p->may_have_pointers)
5704
                make_constraint_from (p, nonlocal_id);
5705
            }
5706
        }
5707
    }
5708
 
5709
  /* Add a constraint for a result decl that is passed by reference.  */
5710
  if (DECL_RESULT (cfun->decl)
5711
      && DECL_BY_REFERENCE (DECL_RESULT (cfun->decl)))
5712
    {
5713
      varinfo_t p, result_vi = get_vi_for_tree (DECL_RESULT (cfun->decl));
5714
 
5715
      for (p = result_vi; p; p = p->next)
5716
        make_constraint_from (p, nonlocal_id);
5717
    }
5718
 
5719
  /* Add a constraint for the incoming static chain parameter.  */
5720
  if (cfun->static_chain_decl != NULL_TREE)
5721
    {
5722
      varinfo_t p, chain_vi = get_vi_for_tree (cfun->static_chain_decl);
5723
 
5724
      for (p = chain_vi; p; p = p->next)
5725
        make_constraint_from (p, nonlocal_id);
5726
    }
5727
}
5728
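
/* Concretely (the parameter types are illustrative): for a parameter
   int *p the loop above emits the constraint p = NONLOCAL, so p may
   point to anything reachable from outside the function; for
   int * restrict p a PARM_RESTRICT constraint is generated instead;
   and a restrict pointer to an object passed by reference gets its own
   PARM_NOALIAS representative, which is what lets it be treated as not
   aliasing other memory.  */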
 
5729
/* Structure used to put solution bitmaps in a hashtable so they can
5730
   be shared among variables with the same points-to set.  */
5731
 
5732
typedef struct shared_bitmap_info
5733
{
5734
  bitmap pt_vars;
5735
  hashval_t hashcode;
5736
} *shared_bitmap_info_t;
5737
typedef const struct shared_bitmap_info *const_shared_bitmap_info_t;
5738
 
5739
static htab_t shared_bitmap_table;
5740
 
5741
/* Hash function for a shared_bitmap_info_t */
5742
 
5743
static hashval_t
5744
shared_bitmap_hash (const void *p)
5745
{
5746
  const_shared_bitmap_info_t const bi = (const_shared_bitmap_info_t) p;
5747
  return bi->hashcode;
5748
}
5749
 
5750
/* Equality function for two shared_bitmap_info_t's. */
5751
 
5752
static int
5753
shared_bitmap_eq (const void *p1, const void *p2)
5754
{
5755
  const_shared_bitmap_info_t const sbi1 = (const_shared_bitmap_info_t) p1;
5756
  const_shared_bitmap_info_t const sbi2 = (const_shared_bitmap_info_t) p2;
5757
  return bitmap_equal_p (sbi1->pt_vars, sbi2->pt_vars);
5758
}
5759
 
5760
/* Lookup a bitmap in the shared bitmap hashtable, and return an already
5761
   existing instance if there is one, NULL otherwise.  */
5762
 
5763
static bitmap
5764
shared_bitmap_lookup (bitmap pt_vars)
5765
{
5766
  void **slot;
5767
  struct shared_bitmap_info sbi;
5768
 
5769
  sbi.pt_vars = pt_vars;
5770
  sbi.hashcode = bitmap_hash (pt_vars);
5771
 
5772
  slot = htab_find_slot_with_hash (shared_bitmap_table, &sbi,
5773
                                   sbi.hashcode, NO_INSERT);
5774
  if (!slot)
5775
    return NULL;
5776
  else
5777
    return ((shared_bitmap_info_t) *slot)->pt_vars;
5778
}
5779
 
5780
 
5781
/* Add a bitmap to the shared bitmap hashtable.  */
5782
 
5783
static void
5784
shared_bitmap_add (bitmap pt_vars)
5785
{
5786
  void **slot;
5787
  shared_bitmap_info_t sbi = XNEW (struct shared_bitmap_info);
5788
 
5789
  sbi->pt_vars = pt_vars;
5790
  sbi->hashcode = bitmap_hash (pt_vars);
5791
 
5792
  slot = htab_find_slot_with_hash (shared_bitmap_table, sbi,
5793
                                   sbi->hashcode, INSERT);
5794
  gcc_assert (!*slot);
5795
  *slot = (void *) sbi;
5796
}
5797
 
5798
 
5799
/* Set bits in INTO corresponding to the variable uids in solution set FROM.  */
5800
 
5801
static void
5802
set_uids_in_ptset (bitmap into, bitmap from, struct pt_solution *pt)
5803
{
5804
  unsigned int i;
5805
  bitmap_iterator bi;
5806
 
5807
  EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
5808
    {
5809
      varinfo_t vi = get_varinfo (i);
5810
 
5811
      /* The only artificial variables that are allowed in a may-alias
5812
         set are heap variables.  */
5813
      if (vi->is_artificial_var && !vi->is_heap_var)
5814
        continue;
5815
 
5816
      if (TREE_CODE (vi->decl) == VAR_DECL
5817
          || TREE_CODE (vi->decl) == PARM_DECL
5818
          || TREE_CODE (vi->decl) == RESULT_DECL)
5819
        {
5820
          /* If we are in IPA mode we will not recompute points-to
5821
             sets after inlining so make sure they stay valid.  */
5822
          if (in_ipa_mode
5823
              && !DECL_PT_UID_SET_P (vi->decl))
5824
            SET_DECL_PT_UID (vi->decl, DECL_UID (vi->decl));
5825
 
5826
          /* Add the decl to the points-to set.  Note that the points-to
5827
             set contains global variables.  */
5828
          bitmap_set_bit (into, DECL_PT_UID (vi->decl));
5829
          if (vi->is_global_var)
5830
            pt->vars_contains_global = true;
5831
        }
5832
    }
5833
}
5834
 
5835
 
5836
/* Compute the points-to solution *PT for the variable VI.  */
5837
 
5838
static void
5839
find_what_var_points_to (varinfo_t orig_vi, struct pt_solution *pt)
5840
{
5841
  unsigned int i;
5842
  bitmap_iterator bi;
5843
  bitmap finished_solution;
5844
  bitmap result;
5845
  varinfo_t vi;
5846
 
5847
  memset (pt, 0, sizeof (struct pt_solution));
5848
 
5849
  /* This variable may have been collapsed, let's get the real
5850
     variable.  */
5851
  vi = get_varinfo (find (orig_vi->id));
5852
 
5853
  /* Translate artificial variables into SSA_NAME_PTR_INFO
5854
     attributes.  */
5855
  EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
5856
    {
5857
      varinfo_t vi = get_varinfo (i);
5858
 
5859
      if (vi->is_artificial_var)
5860
        {
5861
          if (vi->id == nothing_id)
5862
            pt->null = 1;
5863
          else if (vi->id == escaped_id)
5864
            {
5865
              if (in_ipa_mode)
5866
                pt->ipa_escaped = 1;
5867
              else
5868
                pt->escaped = 1;
5869
            }
5870
          else if (vi->id == nonlocal_id)
5871
            pt->nonlocal = 1;
5872
          else if (vi->is_heap_var)
5873
            /* We represent heapvars in the points-to set properly.  */
5874
            ;
5875
          else if (vi->id == readonly_id)
5876
            /* Nobody cares.  */
5877
            ;
5878
          else if (vi->id == anything_id
5879
                   || vi->id == integer_id)
5880
            pt->anything = 1;
5881
        }
5882
    }
5883
 
5884
  /* Instead of doing extra work, simply do not create
5885
     elaborate points-to information for pt_anything pointers.  */
5886
  if (pt->anything)
5887
    return;
5888
 
5889
  /* Share the final set of variables when possible.  */
5890
  finished_solution = BITMAP_GGC_ALLOC ();
5891
  stats.points_to_sets_created++;
5892
 
5893
  set_uids_in_ptset (finished_solution, vi->solution, pt);
5894
  result = shared_bitmap_lookup (finished_solution);
5895
  if (!result)
5896
    {
5897
      shared_bitmap_add (finished_solution);
5898
      pt->vars = finished_solution;
5899
    }
5900
  else
5901
    {
5902
      pt->vars = result;
5903
      bitmap_clear (finished_solution);
5904
    }
5905
}
5906
 
5907
/* Given a pointer variable P, fill in its points-to set.  */
5908
 
5909
static void
5910
find_what_p_points_to (tree p)
5911
{
5912
  struct ptr_info_def *pi;
5913
  tree lookup_p = p;
5914
  varinfo_t vi;
5915
 
5916
  /* For parameters, get at the points-to set for the actual parm
5917
     decl.  */
5918
  if (TREE_CODE (p) == SSA_NAME
5919
      && (TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
5920
          || TREE_CODE (SSA_NAME_VAR (p)) == RESULT_DECL)
5921
      && SSA_NAME_IS_DEFAULT_DEF (p))
5922
    lookup_p = SSA_NAME_VAR (p);
5923
 
5924
  vi = lookup_vi_for_tree (lookup_p);
5925
  if (!vi)
5926
    return;
5927
 
5928
  pi = get_ptr_info (p);
5929
  find_what_var_points_to (vi, &pi->pt);
5930
}
5931
 
5932
 
5933
/* Query statistics for points-to solutions.  */
5934
 
5935
static struct {
5936
  unsigned HOST_WIDE_INT pt_solution_includes_may_alias;
5937
  unsigned HOST_WIDE_INT pt_solution_includes_no_alias;
5938
  unsigned HOST_WIDE_INT pt_solutions_intersect_may_alias;
5939
  unsigned HOST_WIDE_INT pt_solutions_intersect_no_alias;
5940
} pta_stats;
5941
 
5942
void
5943
dump_pta_stats (FILE *s)
5944
{
5945
  fprintf (s, "\nPTA query stats:\n");
5946
  fprintf (s, "  pt_solution_includes: "
5947
           HOST_WIDE_INT_PRINT_DEC" disambiguations, "
5948
           HOST_WIDE_INT_PRINT_DEC" queries\n",
5949
           pta_stats.pt_solution_includes_no_alias,
5950
           pta_stats.pt_solution_includes_no_alias
5951
           + pta_stats.pt_solution_includes_may_alias);
5952
  fprintf (s, "  pt_solutions_intersect: "
5953
           HOST_WIDE_INT_PRINT_DEC" disambiguations, "
5954
           HOST_WIDE_INT_PRINT_DEC" queries\n",
5955
           pta_stats.pt_solutions_intersect_no_alias,
5956
           pta_stats.pt_solutions_intersect_no_alias
5957
           + pta_stats.pt_solutions_intersect_may_alias);
5958
}
5959
 
5960
 
5961
/* Reset the points-to solution *PT to a conservative default
5962
   (point to anything).  */
5963
 
5964
void
5965
pt_solution_reset (struct pt_solution *pt)
5966
{
5967
  memset (pt, 0, sizeof (struct pt_solution));
5968
  pt->anything = true;
5969
}
5970
 
5971
/* Set the points-to solution *PT to point only to the variables
5972
   in VARS.  VARS_CONTAINS_GLOBAL specifies whether VARS contains
5973
   global variables.  */
5975
 
5976
void
5977
pt_solution_set (struct pt_solution *pt, bitmap vars, bool vars_contains_global)
5978
{
5979
  memset (pt, 0, sizeof (struct pt_solution));
5980
  pt->vars = vars;
5981
  pt->vars_contains_global = vars_contains_global;
5982
}
5983
 
5984
/* Set the points-to solution *PT to point only to the variable VAR.  */
5985
 
5986
void
5987
pt_solution_set_var (struct pt_solution *pt, tree var)
5988
{
5989
  memset (pt, 0, sizeof (struct pt_solution));
5990
  pt->vars = BITMAP_GGC_ALLOC ();
5991
  bitmap_set_bit (pt->vars, DECL_PT_UID (var));
5992
  pt->vars_contains_global = is_global_var (var);
5993
}
5994
 
5995
/* Computes the union of the points-to solutions *DEST and *SRC and
5996
   stores the result in *DEST.  This changes the points-to bitmap
5997
   of *DEST and thus may not be used if that might be shared.
5998
   The points-to bitmap of *SRC and *DEST will not be shared after
5999
   this function if they were not before.  */
6000
 
6001
static void
6002
pt_solution_ior_into (struct pt_solution *dest, struct pt_solution *src)
6003
{
6004
  dest->anything |= src->anything;
6005
  if (dest->anything)
6006
    {
6007
      pt_solution_reset (dest);
6008
      return;
6009
    }
6010
 
6011
  dest->nonlocal |= src->nonlocal;
6012
  dest->escaped |= src->escaped;
6013
  dest->ipa_escaped |= src->ipa_escaped;
6014
  dest->null |= src->null;
6015
  dest->vars_contains_global |= src->vars_contains_global;
6016
  if (!src->vars)
6017
    return;
6018
 
6019
  if (!dest->vars)
6020
    dest->vars = BITMAP_GGC_ALLOC ();
6021
  bitmap_ior_into (dest->vars, src->vars);
6022
}
6023
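
/* For example (sets shown symbolically): merging a source solution
   { vars = { a } } into a destination { vars = { b }, nonlocal } yields
   { vars = { a, b }, nonlocal }; if either side has "anything" set the
   destination simply collapses back to the conservative pt_anything
   solution via pt_solution_reset.  */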
 
6024
/* Return true if the points-to solution *PT is empty.  */
6025
 
6026
bool
6027
pt_solution_empty_p (struct pt_solution *pt)
6028
{
6029
  if (pt->anything
6030
      || pt->nonlocal)
6031
    return false;
6032
 
6033
  if (pt->vars
6034
      && !bitmap_empty_p (pt->vars))
6035
    return false;
6036
 
6037
  /* If the solution includes ESCAPED, check if that is empty.  */
6038
  if (pt->escaped
6039
      && !pt_solution_empty_p (&cfun->gimple_df->escaped))
6040
    return false;
6041
 
6042
  /* If the solution includes the IPA escaped solution, check if that is empty.  */
6043
  if (pt->ipa_escaped
6044
      && !pt_solution_empty_p (&ipa_escaped_pt))
6045
    return false;
6046
 
6047
  return true;
6048
}
6049
 
6050
/* Return true if the points-to solution *PT points only to a single var, and
6051
   return the var uid in *UID.  */
6052
 
6053
bool
6054
pt_solution_singleton_p (struct pt_solution *pt, unsigned *uid)
6055
{
6056
  if (pt->anything || pt->nonlocal || pt->escaped || pt->ipa_escaped
6057
      || pt->null || pt->vars == NULL
6058
      || !bitmap_single_bit_set_p (pt->vars))
6059
    return false;
6060
 
6061
  *uid = bitmap_first_set_bit (pt->vars);
6062
  return true;
6063
}
6064
 
6065
/* Return true if the points-to solution *PT includes global memory.  */
6066
 
6067
bool
6068
pt_solution_includes_global (struct pt_solution *pt)
6069
{
6070
  if (pt->anything
6071
      || pt->nonlocal
6072
      || pt->vars_contains_global)
6073
    return true;
6074
 
6075
  if (pt->escaped)
6076
    return pt_solution_includes_global (&cfun->gimple_df->escaped);
6077
 
6078
  if (pt->ipa_escaped)
6079
    return pt_solution_includes_global (&ipa_escaped_pt);
6080
 
6081
  /* ???  This predicate is not correct for the IPA-PTA solution
6082
     as we do not properly distinguish between unit escape points
6083
     and global variables.  */
6084
  if (cfun->gimple_df->ipa_pta)
6085
    return true;
6086
 
6087
  return false;
6088
}
6089
 
6090
/* Return true if the points-to solution *PT includes the variable
6091
   declaration DECL.  */
6092
 
6093
static bool
6094
pt_solution_includes_1 (struct pt_solution *pt, const_tree decl)
6095
{
6096
  if (pt->anything)
6097
    return true;
6098
 
6099
  if (pt->nonlocal
6100
      && is_global_var (decl))
6101
    return true;
6102
 
6103
  if (pt->vars
6104
      && bitmap_bit_p (pt->vars, DECL_PT_UID (decl)))
6105
    return true;
6106
 
6107
  /* If the solution includes ESCAPED, check it.  */
6108
  if (pt->escaped
6109
      && pt_solution_includes_1 (&cfun->gimple_df->escaped, decl))
6110
    return true;
6111
 
6112
  /* If the solution includes the IPA escaped solution, check it.  */
6113
  if (pt->ipa_escaped
6114
      && pt_solution_includes_1 (&ipa_escaped_pt, decl))
6115
    return true;
6116
 
6117
  return false;
6118
}
6119
 
6120
bool
6121
pt_solution_includes (struct pt_solution *pt, const_tree decl)
6122
{
6123
  bool res = pt_solution_includes_1 (pt, decl);
6124
  if (res)
6125
    ++pta_stats.pt_solution_includes_may_alias;
6126
  else
6127
    ++pta_stats.pt_solution_includes_no_alias;
6128
  return res;
6129
}
6130
 
6131
/* Return true if both points-to solutions PT1 and PT2 have a non-empty
6132
   intersection.  */
6133
 
6134
static bool
6135
pt_solutions_intersect_1 (struct pt_solution *pt1, struct pt_solution *pt2)
6136
{
6137
  if (pt1->anything || pt2->anything)
6138
    return true;
6139
 
6140
  /* If either points to unknown global memory and the other points to
6141
     any global memory they alias.  */
6142
  if ((pt1->nonlocal
6143
       && (pt2->nonlocal
6144
           || pt2->vars_contains_global))
6145
      || (pt2->nonlocal
6146
          && pt1->vars_contains_global))
6147
    return true;
6148
 
6149
  /* Check the escaped solution if required.  */
6150
  if ((pt1->escaped || pt2->escaped)
6151
      && !pt_solution_empty_p (&cfun->gimple_df->escaped))
6152
    {
6153
      /* If both point to escaped memory and that solution
6154
         is not empty they alias.  */
6155
      if (pt1->escaped && pt2->escaped)
6156
        return true;
6157
 
6158
      /* If either points to escaped memory see if the escaped solution
6159
         intersects with the other.  */
6160
      if ((pt1->escaped
6161
           && pt_solutions_intersect_1 (&cfun->gimple_df->escaped, pt2))
6162
          || (pt2->escaped
6163
              && pt_solutions_intersect_1 (&cfun->gimple_df->escaped, pt1)))
6164
        return true;
6165
    }
6166
 
6167
  /* Check the IPA escaped solution if required.
6168
     ???  Do we need to check the local against the IPA escaped sets?  */
6169
  if ((pt1->ipa_escaped || pt2->ipa_escaped)
6170
      && !pt_solution_empty_p (&ipa_escaped_pt))
6171
    {
6172
      /* If both point to escaped memory and that solution
6173
         is not empty they alias.  */
6174
      if (pt1->ipa_escaped && pt2->ipa_escaped)
6175
        return true;
6176
 
6177
      /* If either points to escaped memory see if the escaped solution
6178
         intersects with the other.  */
6179
      if ((pt1->ipa_escaped
6180
           && pt_solutions_intersect_1 (&ipa_escaped_pt, pt2))
6181
          || (pt2->ipa_escaped
6182
              && pt_solutions_intersect_1 (&ipa_escaped_pt, pt1)))
6183
        return true;
6184
    }
6185
 
6186
  /* Now both pointers alias if their points-to solution intersects.  */
6187
  return (pt1->vars
6188
          && pt2->vars
6189
          && bitmap_intersect_p (pt1->vars, pt2->vars));
6190
}
6191
 
6192
bool
6193
pt_solutions_intersect (struct pt_solution *pt1, struct pt_solution *pt2)
6194
{
6195
  bool res = pt_solutions_intersect_1 (pt1, pt2);
6196
  if (res)
6197
    ++pta_stats.pt_solutions_intersect_may_alias;
6198
  else
6199
    ++pta_stats.pt_solutions_intersect_no_alias;
6200
  return res;
6201
}
6202
 
6203
 
6204
/* Dump points-to information to OUTFILE.  */
6205
 
6206
static void
6207
dump_sa_points_to_info (FILE *outfile)
6208
{
6209
  unsigned int i;
6210
 
6211
  fprintf (outfile, "\nPoints-to sets\n\n");
6212
 
6213
  if (dump_flags & TDF_STATS)
6214
    {
6215
      fprintf (outfile, "Stats:\n");
6216
      fprintf (outfile, "Total vars:               %d\n", stats.total_vars);
6217
      fprintf (outfile, "Non-pointer vars:          %d\n",
6218
               stats.nonpointer_vars);
6219
      fprintf (outfile, "Statically unified vars:  %d\n",
6220
               stats.unified_vars_static);
6221
      fprintf (outfile, "Dynamically unified vars: %d\n",
6222
               stats.unified_vars_dynamic);
6223
      fprintf (outfile, "Iterations:               %d\n", stats.iterations);
6224
      fprintf (outfile, "Number of edges:          %d\n", stats.num_edges);
6225
      fprintf (outfile, "Number of implicit edges: %d\n",
6226
               stats.num_implicit_edges);
6227
    }
6228
 
6229
  for (i = 0; i < VEC_length (varinfo_t, varmap); i++)
6230
    {
6231
      varinfo_t vi = get_varinfo (i);
6232
      if (!vi->may_have_pointers)
6233
        continue;
6234
      dump_solution_for_var (outfile, i);
6235
    }
6236
}
6237
 
6238
 
6239
/* Debug points-to information to stderr.  */
6240
 
6241
DEBUG_FUNCTION void
6242
debug_sa_points_to_info (void)
6243
{
6244
  dump_sa_points_to_info (stderr);
6245
}
6246
 
6247
 
6248
/* Initialize the always-existing constraint variables for NULL,
6249
   ANYTHING, READONLY, and INTEGER.  */
6250
 
6251
static void
6252
init_base_vars (void)
6253
{
6254
  struct constraint_expr lhs, rhs;
6255
  varinfo_t var_anything;
6256
  varinfo_t var_nothing;
6257
  varinfo_t var_readonly;
6258
  varinfo_t var_escaped;
6259
  varinfo_t var_nonlocal;
6260
  varinfo_t var_storedanything;
6261
  varinfo_t var_integer;
6262
 
6263
  /* Create the NULL variable, used to represent that a variable points
6264
     to NULL.  */
6265
  var_nothing = new_var_info (NULL_TREE, "NULL");
6266
  gcc_assert (var_nothing->id == nothing_id);
6267
  var_nothing->is_artificial_var = 1;
6268
  var_nothing->offset = 0;
6269
  var_nothing->size = ~0;
6270
  var_nothing->fullsize = ~0;
6271
  var_nothing->is_special_var = 1;
6272
  var_nothing->may_have_pointers = 0;
6273
  var_nothing->is_global_var = 0;
6274
 
6275
  /* Create the ANYTHING variable, used to represent that a variable
6276
     points to some unknown piece of memory.  */
6277
  var_anything = new_var_info (NULL_TREE, "ANYTHING");
6278
  gcc_assert (var_anything->id == anything_id);
6279
  var_anything->is_artificial_var = 1;
6280
  var_anything->size = ~0;
6281
  var_anything->offset = 0;
6282
  var_anything->next = NULL;
6283
  var_anything->fullsize = ~0;
6284
  var_anything->is_special_var = 1;
6285
 
6286
  /* Anything points to anything.  This makes deref constraints just
6287
     work in the presence of linked lists and other p = *p type loops,
6288
     by saying that *ANYTHING = ANYTHING. */
6289
  lhs.type = SCALAR;
6290
  lhs.var = anything_id;
6291
  lhs.offset = 0;
6292
  rhs.type = ADDRESSOF;
6293
  rhs.var = anything_id;
6294
  rhs.offset = 0;
6295
 
6296
  /* This specifically does not use process_constraint because
6297
     process_constraint ignores all anything = anything constraints, since all
6298
     but this one are redundant.  */
6299
  VEC_safe_push (constraint_t, heap, constraints, new_constraint (lhs, rhs));
6300
 
6301
  /* Create the READONLY variable, used to represent that a variable
6302
     points to readonly memory.  */
6303
  var_readonly = new_var_info (NULL_TREE, "READONLY");
6304
  gcc_assert (var_readonly->id == readonly_id);
6305
  var_readonly->is_artificial_var = 1;
6306
  var_readonly->offset = 0;
6307
  var_readonly->size = ~0;
6308
  var_readonly->fullsize = ~0;
6309
  var_readonly->next = NULL;
6310
  var_readonly->is_special_var = 1;
6311
 
6312
  /* readonly memory points to anything, in order to make deref
6313
     easier.  In reality, it points to anything the particular
6314
     readonly variable can point to, but we don't track this
6315
     separately. */
6316
  lhs.type = SCALAR;
6317
  lhs.var = readonly_id;
6318
  lhs.offset = 0;
6319
  rhs.type = ADDRESSOF;
6320
  rhs.var = readonly_id;  /* FIXME */
6321
  rhs.offset = 0;
6322
  process_constraint (new_constraint (lhs, rhs));
6323
 
6324
  /* Create the ESCAPED variable, used to represent the set of escaped
6325
     memory.  */
6326
  var_escaped = new_var_info (NULL_TREE, "ESCAPED");
6327
  gcc_assert (var_escaped->id == escaped_id);
6328
  var_escaped->is_artificial_var = 1;
6329
  var_escaped->offset = 0;
6330
  var_escaped->size = ~0;
6331
  var_escaped->fullsize = ~0;
6332
  var_escaped->is_special_var = 0;
6333
 
6334
  /* Create the NONLOCAL variable, used to represent the set of nonlocal
6335
     memory.  */
6336
  var_nonlocal = new_var_info (NULL_TREE, "NONLOCAL");
6337
  gcc_assert (var_nonlocal->id == nonlocal_id);
6338
  var_nonlocal->is_artificial_var = 1;
6339
  var_nonlocal->offset = 0;
6340
  var_nonlocal->size = ~0;
6341
  var_nonlocal->fullsize = ~0;
6342
  var_nonlocal->is_special_var = 1;
6343
 
6344
  /* ESCAPED = *ESCAPED, because escaped is may-deref'd at calls, etc.  */
6345
  lhs.type = SCALAR;
6346
  lhs.var = escaped_id;
6347
  lhs.offset = 0;
6348
  rhs.type = DEREF;
6349
  rhs.var = escaped_id;
6350
  rhs.offset = 0;
6351
  process_constraint (new_constraint (lhs, rhs));
6352
 
6353
  /* ESCAPED = ESCAPED + UNKNOWN_OFFSET, because if a sub-field escapes the
6354
     whole variable escapes.  */
6355
  lhs.type = SCALAR;
6356
  lhs.var = escaped_id;
6357
  lhs.offset = 0;
6358
  rhs.type = SCALAR;
6359
  rhs.var = escaped_id;
6360
  rhs.offset = UNKNOWN_OFFSET;
6361
  process_constraint (new_constraint (lhs, rhs));
6362
 
6363
  /* *ESCAPED = NONLOCAL.  This is true because we have to assume
6364
     everything pointed to by escaped points to what global memory can
6365
     point to.  */
6366
  lhs.type = DEREF;
6367
  lhs.var = escaped_id;
6368
  lhs.offset = 0;
6369
  rhs.type = SCALAR;
6370
  rhs.var = nonlocal_id;
6371
  rhs.offset = 0;
6372
  process_constraint (new_constraint (lhs, rhs));
6373
 
6374
  /* NONLOCAL = &NONLOCAL, NONLOCAL = &ESCAPED.  This is true because
6375
     global memory may point to global memory and escaped memory.  */
6376
  lhs.type = SCALAR;
6377
  lhs.var = nonlocal_id;
6378
  lhs.offset = 0;
6379
  rhs.type = ADDRESSOF;
6380
  rhs.var = nonlocal_id;
6381
  rhs.offset = 0;
6382
  process_constraint (new_constraint (lhs, rhs));
6383
  rhs.type = ADDRESSOF;
6384
  rhs.var = escaped_id;
6385
  rhs.offset = 0;
6386
  process_constraint (new_constraint (lhs, rhs));
6387
 
6388
  /* Create the STOREDANYTHING variable, used to represent the set of
6389
     variables stored to *ANYTHING.  */
6390
  var_storedanything = new_var_info (NULL_TREE, "STOREDANYTHING");
6391
  gcc_assert (var_storedanything->id == storedanything_id);
6392
  var_storedanything->is_artificial_var = 1;
6393
  var_storedanything->offset = 0;
6394
  var_storedanything->size = ~0;
6395
  var_storedanything->fullsize = ~0;
6396
  var_storedanything->is_special_var = 0;
6397
 
6398
  /* Create the INTEGER variable, used to represent that a variable points
6399
     to what an INTEGER "points to".  */
6400
  var_integer = new_var_info (NULL_TREE, "INTEGER");
6401
  gcc_assert (var_integer->id == integer_id);
6402
  var_integer->is_artificial_var = 1;
6403
  var_integer->size = ~0;
6404
  var_integer->fullsize = ~0;
6405
  var_integer->offset = 0;
6406
  var_integer->next = NULL;
6407
  var_integer->is_special_var = 1;
6408
 
6409
  /* INTEGER = ANYTHING, because we don't know where a dereference of
6410
     a random integer will point.  */
6411
  lhs.type = SCALAR;
6412
  lhs.var = integer_id;
6413
  lhs.offset = 0;
6414
  rhs.type = ADDRESSOF;
6415
  rhs.var = anything_id;
6416
  rhs.offset = 0;
6417
  process_constraint (new_constraint (lhs, rhs));
6418
}
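
/* For orientation, a rough summary of the base constraints installed
   above (an illustrative sketch, not additional constraints):

     ANYTHING = &ANYTHING                  so *ANYTHING = ANYTHING holds
     READONLY = &READONLY                  readonly storage collapsed into
                                           one self-pointing blob
     ESCAPED  = *ESCAPED                   escaped memory may be deref'd
     ESCAPED  = ESCAPED + UNKNOWN_OFFSET   a sub-field escaping escapes all
     *ESCAPED = NONLOCAL                   what escaped points to may point
                                           to nonlocal memory
     NONLOCAL = &NONLOCAL, &ESCAPED        globals may point to both sets
     INTEGER  = &ANYTHING                  deref of a random int is unknown

   E.g. for a snippet like

       extern int *gp;
       int *p = gp;

   p's solution typically ends up folded into the NONLOCAL/ESCAPED
   approximation rather than being tracked more precisely.  */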
6419
 
6420
/* Initialize things necessary to perform PTA.  */
6421
 
6422
static void
6423
init_alias_vars (void)
6424
{
6425
  use_field_sensitive = (MAX_FIELDS_FOR_FIELD_SENSITIVE > 1);
6426
 
6427
  bitmap_obstack_initialize (&pta_obstack);
6428
  bitmap_obstack_initialize (&oldpta_obstack);
6429
  bitmap_obstack_initialize (&predbitmap_obstack);
6430
 
6431
  constraint_pool = create_alloc_pool ("Constraint pool",
6432
                                       sizeof (struct constraint), 30);
6433
  variable_info_pool = create_alloc_pool ("Variable info pool",
6434
                                          sizeof (struct variable_info), 30);
6435
  constraints = VEC_alloc (constraint_t, heap, 8);
6436
  varmap = VEC_alloc (varinfo_t, heap, 8);
6437
  vi_for_tree = pointer_map_create ();
6438
  call_stmt_vars = pointer_map_create ();
6439
 
6440
  memset (&stats, 0, sizeof (stats));
6441
  shared_bitmap_table = htab_create (511, shared_bitmap_hash,
6442
                                     shared_bitmap_eq, free);
6443
  init_base_vars ();
6444
 
6445
  gcc_obstack_init (&fake_var_decl_obstack);
6446
}
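
/* Usage sketch (assuming the usual spelling of the parameter in GCC's
   params.def): field sensitivity is derived from the
   max-fields-for-field-sensitive --param above, so it can be tuned or
   disabled from the command line, e.g.

       gcc -O2 --param max-fields-for-field-sensitive=0 foo.c

   which turns use_field_sensitive off and treats each variable as a
   single blob instead of a list of fields.  */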
6447
 
6448
/* Remove the REF and ADDRESS edges from GRAPH, as well as all the
6449
   predecessor edges.  */
6450
 
6451
static void
6452
remove_preds_and_fake_succs (constraint_graph_t graph)
6453
{
6454
  unsigned int i;
6455
 
6456
  /* Clear the implicit ref and address nodes from the successor
6457
     lists.  */
6458
  for (i = 0; i < FIRST_REF_NODE; i++)
6459
    {
6460
      if (graph->succs[i])
6461
        bitmap_clear_range (graph->succs[i], FIRST_REF_NODE,
6462
                            FIRST_REF_NODE * 2);
6463
    }
6464
 
6465
  /* Free the successor list for the non-ref nodes.  */
6466
  for (i = FIRST_REF_NODE; i < graph->size; i++)
6467
    {
6468
      if (graph->succs[i])
6469
        BITMAP_FREE (graph->succs[i]);
6470
    }
6471
 
6472
  /* Now reallocate the size of the successor list, and blow away
6473
     the predecessor bitmaps.  */
6474
  graph->size = VEC_length (varinfo_t, varmap);
6475
  graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size);
6476
 
6477
  free (graph->implicit_preds);
6478
  graph->implicit_preds = NULL;
6479
  free (graph->preds);
6480
  graph->preds = NULL;
6481
  bitmap_obstack_release (&predbitmap_obstack);
6482
}
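
/* For orientation (a sketch based on the loops above): the predecessor
   graph used for variable substitution has 2 * FIRST_REF_NODE nodes,
   where nodes [0, FIRST_REF_NODE) represent the variables themselves and
   nodes [FIRST_REF_NODE, 2 * FIRST_REF_NODE) their dereferences; once
   substitution is done only the first half and its successor edges are
   needed, which is what this function trims the graph down to.  */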
6483
 
6484
/* Solve the constraint set.  */
6485
 
6486
static void
6487
solve_constraints (void)
6488
{
6489
  struct scc_info *si;
6490
 
6491
  if (dump_file)
6492
    fprintf (dump_file,
6493
             "\nCollapsing static cycles and doing variable "
6494
             "substitution\n");
6495
 
6496
  init_graph (VEC_length (varinfo_t, varmap) * 2);
6497
 
6498
  if (dump_file)
6499
    fprintf (dump_file, "Building predecessor graph\n");
6500
  build_pred_graph ();
6501
 
6502
  if (dump_file)
6503
    fprintf (dump_file, "Detecting pointer and location "
6504
             "equivalences\n");
6505
  si = perform_var_substitution (graph);
6506
 
6507
  if (dump_file)
6508
    fprintf (dump_file, "Rewriting constraints and unifying "
6509
             "variables\n");
6510
  rewrite_constraints (graph, si);
6511
 
6512
  build_succ_graph ();
6513
 
6514
  free_var_substitution_info (si);
6515
 
6516
  /* Attach complex constraints to graph nodes.  */
6517
  move_complex_constraints (graph);
6518
 
6519
  if (dump_file)
6520
    fprintf (dump_file, "Uniting pointer but not location equivalent "
6521
             "variables\n");
6522
  unite_pointer_equivalences (graph);
6523
 
6524
  if (dump_file)
6525
    fprintf (dump_file, "Finding indirect cycles\n");
6526
  find_indirect_cycles (graph);
6527
 
6528
  /* Implicit nodes and predecessors are no longer necessary at this
6529
     point. */
6530
  remove_preds_and_fake_succs (graph);
6531
 
6532
  if (dump_file && (dump_flags & TDF_GRAPH))
6533
    {
6534
      fprintf (dump_file, "\n\n// The constraint graph before solve-graph "
6535
               "in dot format:\n");
6536
      dump_constraint_graph (dump_file);
6537
      fprintf (dump_file, "\n\n");
6538
    }
6539
 
6540
  if (dump_file)
6541
    fprintf (dump_file, "Solving graph\n");
6542
 
6543
  solve_graph (graph);
6544
 
6545
  if (dump_file && (dump_flags & TDF_GRAPH))
6546
    {
6547
      fprintf (dump_file, "\n\n// The constraint graph after solve-graph "
6548
               "in dot format:\n");
6549
      dump_constraint_graph (dump_file);
6550
      fprintf (dump_file, "\n\n");
6551
    }
6552
 
6553
  if (dump_file)
6554
    dump_sa_points_to_info (dump_file);
6555
}
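
/* For illustration only: with input constraints such as

       p = &x
       q = p
       *q = &y

   dump_constraints prints the (possibly rewritten) constraint list and
   dump_sa_points_to_info prints the solved sets, roughly as p = { x },
   q = { x }, with y added to x's solution by the store through q.  The
   exact dump syntax is whatever the dump routines above produce and may
   differ between GCC versions.  */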
6556
 
6557
/* Create points-to sets for the current function.  See the comments
6558
   at the start of the file for an algorithmic overview.  */
6559
 
6560
static void
6561
compute_points_to_sets (void)
6562
{
6563
  basic_block bb;
6564
  unsigned i;
6565
  varinfo_t vi;
6566
 
6567
  timevar_push (TV_TREE_PTA);
6568
 
6569
  init_alias_vars ();
6570
 
6571
  intra_create_variable_infos ();
6572
 
6573
  /* Now walk all statements and build the constraint set.  */
6574
  FOR_EACH_BB (bb)
6575
    {
6576
      gimple_stmt_iterator gsi;
6577
 
6578
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6579
        {
6580
          gimple phi = gsi_stmt (gsi);
6581
 
6582
          if (is_gimple_reg (gimple_phi_result (phi)))
6583
            find_func_aliases (phi);
6584
        }
6585
 
6586
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6587
        {
6588
          gimple stmt = gsi_stmt (gsi);
6589
 
6590
          find_func_aliases (stmt);
6591
        }
6592
    }
6593
 
6594
  if (dump_file)
6595
    {
6596
      fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
6597
      dump_constraints (dump_file, 0);
6598
    }
6599
 
6600
  /* From the constraints compute the points-to sets.  */
6601
  solve_constraints ();
6602
 
6603
  /* Compute the points-to set for ESCAPED used for call-clobber analysis.  */
6604
  find_what_var_points_to (get_varinfo (escaped_id),
6605
                           &cfun->gimple_df->escaped);
6606
 
6607
  /* Make sure the ESCAPED solution (which is used as a placeholder in
6608
     other solutions) does not reference itself.  This simplifies
6609
     points-to solution queries.  */
6610
  cfun->gimple_df->escaped.escaped = 0;
6611
 
6612
  /* Mark escaped HEAP variables as global.  */
6613
  FOR_EACH_VEC_ELT (varinfo_t, varmap, i, vi)
6614
    if (vi->is_heap_var
6615
        && !vi->is_global_var)
6616
      DECL_EXTERNAL (vi->decl) = vi->is_global_var
6617
        = pt_solution_includes (&cfun->gimple_df->escaped, vi->decl);
6618
 
6619
  /* Compute the points-to sets for pointer SSA_NAMEs.  */
6620
  for (i = 0; i < num_ssa_names; ++i)
6621
    {
6622
      tree ptr = ssa_name (i);
6623
      if (ptr
6624
          && POINTER_TYPE_P (TREE_TYPE (ptr)))
6625
        find_what_p_points_to (ptr);
6626
    }
6627
 
6628
  /* Compute the call-used/clobbered sets.  */
6629
  FOR_EACH_BB (bb)
6630
    {
6631
      gimple_stmt_iterator gsi;
6632
 
6633
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6634
        {
6635
          gimple stmt = gsi_stmt (gsi);
6636
          struct pt_solution *pt;
6637
          if (!is_gimple_call (stmt))
6638
            continue;
6639
 
6640
          pt = gimple_call_use_set (stmt);
6641
          if (gimple_call_flags (stmt) & ECF_CONST)
6642
            memset (pt, 0, sizeof (struct pt_solution));
6643
          else if ((vi = lookup_call_use_vi (stmt)) != NULL)
6644
            {
6645
              find_what_var_points_to (vi, pt);
6646
              /* Escaped (and thus nonlocal) variables are always
6647
                 implicitly used by calls.  */
6648
              /* ???  ESCAPED can be empty even though NONLOCAL
6649
                 always escapes.  */
6650
              pt->nonlocal = 1;
6651
              pt->escaped = 1;
6652
            }
6653
          else
6654
            {
6655
              /* If there is nothing special about this call then
6656
                 we have made everything that is used also escape.  */
6657
              *pt = cfun->gimple_df->escaped;
6658
              pt->nonlocal = 1;
6659
            }
6660
 
6661
          pt = gimple_call_clobber_set (stmt);
6662
          if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
6663
            memset (pt, 0, sizeof (struct pt_solution));
6664
          else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
6665
            {
6666
              find_what_var_points_to (vi, pt);
6667
              /* Escaped (and thus nonlocal) variables are always
6668
                 implicitly clobbered by calls.  */
6669
              /* ???  ESCAPED can be empty even though NONLOCAL
6670
                 always escapes.  */
6671
              pt->nonlocal = 1;
6672
              pt->escaped = 1;
6673
            }
6674
          else
6675
            {
6676
              /* If there is nothing special about this call then
6677
                 we have made everything that is used also escape.  */
6678
              *pt = cfun->gimple_df->escaped;
6679
              pt->nonlocal = 1;
6680
            }
6681
        }
6682
    }
6683
 
6684
  timevar_pop (TV_TREE_PTA);
6685
}
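
/* A small worked example (a sketch; the names are hypothetical):

       int x;
       void f (int *q) { int *p = &x; *p = 1; g (q); }

   compute_points_to_sets records p_1 -> { x } via find_what_p_points_to,
   lets whatever q points to escape through the unknown call to g, and
   fills that call's use/clobber sets either from a per-call variable or,
   failing that, from the ESCAPED solution plus NONLOCAL, exactly as the
   loop over call statements above does.  */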
6686
 
6687
 
6688
/* Delete created points-to sets.  */
6689
 
6690
static void
6691
delete_points_to_sets (void)
6692
{
6693
  unsigned int i;
6694
 
6695
  htab_delete (shared_bitmap_table);
6696
  if (dump_file && (dump_flags & TDF_STATS))
6697
    fprintf (dump_file, "Points to sets created:%d\n",
6698
             stats.points_to_sets_created);
6699
 
6700
  pointer_map_destroy (vi_for_tree);
6701
  pointer_map_destroy (call_stmt_vars);
6702
  bitmap_obstack_release (&pta_obstack);
6703
  VEC_free (constraint_t, heap, constraints);
6704
 
6705
  for (i = 0; i < graph->size; i++)
6706
    VEC_free (constraint_t, heap, graph->complex[i]);
6707
  free (graph->complex);
6708
 
6709
  free (graph->rep);
6710
  free (graph->succs);
6711
  free (graph->pe);
6712
  free (graph->pe_rep);
6713
  free (graph->indirect_cycles);
6714
  free (graph);
6715
 
6716
  VEC_free (varinfo_t, heap, varmap);
6717
  free_alloc_pool (variable_info_pool);
6718
  free_alloc_pool (constraint_pool);
6719
 
6720
  obstack_free (&fake_var_decl_obstack, NULL);
6721
}
6722
 
6723
 
6724
/* Compute points-to information for every SSA_NAME pointer in the
6725
   current function and compute the transitive closure of escaped
6726
   variables to re-initialize the call-clobber states of local variables.  */
6727
 
6728
unsigned int
6729
compute_may_aliases (void)
6730
{
6731
  if (cfun->gimple_df->ipa_pta)
6732
    {
6733
      if (dump_file)
6734
        {
6735
          fprintf (dump_file, "\nNot re-computing points-to information "
6736
                   "because IPA points-to information is available.\n\n");
6737
 
6738
          /* But still dump what we have.  */
6739
          dump_alias_info (dump_file);
6740
 
6741
          if (dump_flags & TDF_DETAILS)
6742
            dump_referenced_vars (dump_file);
6743
        }
6744
 
6745
      return 0;
6746
    }
6747
 
6748
  /* For each pointer P_i, determine the sets of variables that P_i may
6749
     point-to.  Compute the reachability set of escaped and call-used
6750
     variables.  */
6751
  compute_points_to_sets ();
6752
 
6753
  /* Debugging dumps.  */
6754
  if (dump_file)
6755
    {
6756
      dump_alias_info (dump_file);
6757
 
6758
      if (dump_flags & TDF_DETAILS)
6759
        dump_referenced_vars (dump_file);
6760
    }
6761
 
6762
  /* Deallocate memory used by aliasing data structures and the internal
6763
     points-to solution.  */
6764
  delete_points_to_sets ();
6765
 
6766
  gcc_assert (!need_ssa_update_p (cfun));
6767
 
6768
  return 0;
6769
}
6770
 
6771
static bool
6772
gate_tree_pta (void)
6773
{
6774
  return flag_tree_pta;
6775
}
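
/* Usage note (a sketch; defaults depend on the GCC version): flag_tree_pta
   corresponds to the -ftree-pta option, so e.g.

       gcc -O2 -fno-tree-pta foo.c

   makes this gate return false and the dummy alias/ealias passes below
   are skipped, leaving only conservative alias information.  */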
6776
 
6777
/* A dummy pass to cause points-to information to be computed via
6778
   TODO_rebuild_alias.  */
6779
 
6780
struct gimple_opt_pass pass_build_alias =
6781
{
6782
 {
6783
  GIMPLE_PASS,
6784
  "alias",                  /* name */
6785
  gate_tree_pta,            /* gate */
6786
  NULL,                     /* execute */
6787
  NULL,                     /* sub */
6788
  NULL,                     /* next */
6789
  0,                        /* static_pass_number */
6790
  TV_NONE,                  /* tv_id */
6791
  PROP_cfg | PROP_ssa,      /* properties_required */
6792
  0,                         /* properties_provided */
6793
  0,                        /* properties_destroyed */
6794
  0,                        /* todo_flags_start */
6795
  TODO_rebuild_alias        /* todo_flags_finish */
6796
 }
6797
};
6798
 
6799
/* A dummy pass to cause points-to information to be computed via
6800
   TODO_rebuild_alias.  */
6801
 
6802
struct gimple_opt_pass pass_build_ealias =
6803
{
6804
 {
6805
  GIMPLE_PASS,
6806
  "ealias",                 /* name */
6807
  gate_tree_pta,            /* gate */
6808
  NULL,                     /* execute */
6809
  NULL,                     /* sub */
6810
  NULL,                     /* next */
6811
  0,                        /* static_pass_number */
6812
  TV_NONE,                  /* tv_id */
6813
  PROP_cfg | PROP_ssa,      /* properties_required */
6814
  0,                         /* properties_provided */
6815
  0,                        /* properties_destroyed */
6816
  0,                        /* todo_flags_start */
6817
  TODO_rebuild_alias        /* todo_flags_finish */
6818
 }
6819
};
6820
 
6821
 
6822
/* Return true if we should execute IPA PTA.  */
6823
static bool
6824
gate_ipa_pta (void)
6825
{
6826
  return (optimize
6827
          && flag_ipa_pta
6828
          /* Don't bother doing anything if the program has errors.  */
6829
          && !seen_error ());
6830
}
6831
 
6832
/* IPA PTA solutions for ESCAPED.  */
6833
struct pt_solution ipa_escaped_pt
6834
  = { true, false, false, false, false, false, NULL };
6835
 
6836
/* Associate NODE with varinfo DATA.  Worker for
6837
   cgraph_for_node_and_aliases.  */
6838
static bool
6839
associate_varinfo_to_alias (struct cgraph_node *node, void *data)
6840
{
6841
  if (node->alias || node->thunk.thunk_p)
6842
    insert_vi_for_tree (node->decl, (varinfo_t)data);
6843
  return false;
6844
}
6845
 
6846
/* Execute the driver for IPA PTA.  */
6847
static unsigned int
6848
ipa_pta_execute (void)
6849
{
6850
  struct cgraph_node *node;
6851
  struct varpool_node *var;
6852
  int from;
6853
 
6854
  in_ipa_mode = 1;
6855
 
6856
  init_alias_vars ();
6857
 
6858
  if (dump_file && (dump_flags & TDF_DETAILS))
6859
    {
6860
      dump_cgraph (dump_file);
6861
      fprintf (dump_file, "\n");
6862
    }
6863
 
6864
  /* Build the constraints.  */
6865
  for (node = cgraph_nodes; node; node = node->next)
6866
    {
6867
      varinfo_t vi;
6868
      /* Nodes without a body are not interesting.  Especially do not
6869
         visit clones at this point for now - we get duplicate decls
6870
         there for inline clones at least.  */
6871
      if (!cgraph_function_with_gimple_body_p (node))
6872
        continue;
6873
 
6874
      gcc_assert (!node->clone_of);
6875
 
6876
      vi = create_function_info_for (node->decl,
6877
                                     alias_get_name (node->decl));
6878
      cgraph_for_node_and_aliases (node, associate_varinfo_to_alias, vi, true);
6879
    }
6880
 
6881
  /* Create constraints for global variables and their initializers.  */
6882
  for (var = varpool_nodes; var; var = var->next)
6883
    {
6884
      if (var->alias)
6885
        continue;
6886
 
6887
      get_vi_for_tree (var->decl);
6888
    }
6889
 
6890
  if (dump_file)
6891
    {
6892
      fprintf (dump_file,
6893
               "Generating constraints for global initializers\n\n");
6894
      dump_constraints (dump_file, 0);
6895
      fprintf (dump_file, "\n");
6896
    }
6897
  from = VEC_length (constraint_t, constraints);
6898
 
6899
  for (node = cgraph_nodes; node; node = node->next)
6900
    {
6901
      struct function *func;
6902
      basic_block bb;
6903
      tree old_func_decl;
6904
 
6905
      /* Nodes without a body are not interesting.  */
6906
      if (!cgraph_function_with_gimple_body_p (node))
6907
        continue;
6908
 
6909
      if (dump_file)
6910
        {
6911
          fprintf (dump_file,
6912
                   "Generating constraints for %s", cgraph_node_name (node));
6913
          if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
6914
            fprintf (dump_file, " (%s)",
6915
                     IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (node->decl)));
6916
          fprintf (dump_file, "\n");
6917
        }
6918
 
6919
      func = DECL_STRUCT_FUNCTION (node->decl);
6920
      old_func_decl = current_function_decl;
6921
      push_cfun (func);
6922
      current_function_decl = node->decl;
6923
 
6924
      /* For externally visible functions, or functions annotated with the
6925
         'used' attribute, use local constraints for their arguments.
6926
         For local functions we see all callers and thus do not need initial
6927
         constraints for parameters.  */
6928
      if (node->reachable_from_other_partition
6929
          || node->local.externally_visible
6930
          || node->needed)
6931
        {
6932
          intra_create_variable_infos ();
6933
 
6934
          /* We also need to make function return values escape.  Nothing
6935
             escapes by returning from main though.  */
6936
          if (!MAIN_NAME_P (DECL_NAME (node->decl)))
6937
            {
6938
              varinfo_t fi, rvi;
6939
              fi = lookup_vi_for_tree (node->decl);
6940
              rvi = first_vi_for_offset (fi, fi_result);
6941
              if (rvi && rvi->offset == fi_result)
6942
                {
6943
                  struct constraint_expr includes;
6944
                  struct constraint_expr var;
6945
                  includes.var = escaped_id;
6946
                  includes.offset = 0;
6947
                  includes.type = SCALAR;
6948
                  var.var = rvi->id;
6949
                  var.offset = 0;
6950
                  var.type = SCALAR;
6951
                  process_constraint (new_constraint (includes, var));
6952
                }
6953
            }
6954
        }
6955
 
6956
      /* Build constraints for the function body.  */
6957
      FOR_EACH_BB_FN (bb, func)
6958
        {
6959
          gimple_stmt_iterator gsi;
6960
 
6961
          for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
6962
               gsi_next (&gsi))
6963
            {
6964
              gimple phi = gsi_stmt (gsi);
6965
 
6966
              if (is_gimple_reg (gimple_phi_result (phi)))
6967
                find_func_aliases (phi);
6968
            }
6969
 
6970
          for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6971
            {
6972
              gimple stmt = gsi_stmt (gsi);
6973
 
6974
              find_func_aliases (stmt);
6975
              find_func_clobbers (stmt);
6976
            }
6977
        }
6978
 
6979
      current_function_decl = old_func_decl;
6980
      pop_cfun ();
6981
 
6982
      if (dump_file)
6983
        {
6984
          fprintf (dump_file, "\n");
6985
          dump_constraints (dump_file, from);
6986
          fprintf (dump_file, "\n");
6987
        }
6988
      from = VEC_length (constraint_t, constraints);
6989
    }
6990
 
6991
  /* From the constraints compute the points-to sets.  */
6992
  solve_constraints ();
6993
 
6994
  /* Compute the global points-to sets for ESCAPED.
6995
     ???  Note that the computed escape set is not correct
6996
     for the whole unit as we fail to consider graph edges to
6997
     externally visible functions.  */
6998
  find_what_var_points_to (get_varinfo (escaped_id), &ipa_escaped_pt);
6999
 
7000
  /* Make sure the ESCAPED solution (which is used as a placeholder in
7001
     other solutions) does not reference itself.  This simplifies
7002
     points-to solution queries.  */
7003
  ipa_escaped_pt.ipa_escaped = 0;
7004
 
7005
  /* Assign the points-to sets to the SSA names in the unit.  */
7006
  for (node = cgraph_nodes; node; node = node->next)
7007
    {
7008
      tree ptr;
7009
      struct function *fn;
7010
      unsigned i;
7011
      varinfo_t fi;
7012
      basic_block bb;
7013
      struct pt_solution uses, clobbers;
7014
      struct cgraph_edge *e;
7015
 
7016
      /* Nodes without a body are not interesting.  */
7017
      if (!cgraph_function_with_gimple_body_p (node))
7018
        continue;
7019
 
7020
      fn = DECL_STRUCT_FUNCTION (node->decl);
7021
 
7022
      /* Compute the points-to sets for pointer SSA_NAMEs.  */
7023
      FOR_EACH_VEC_ELT (tree, fn->gimple_df->ssa_names, i, ptr)
7024
        {
7025
          if (ptr
7026
              && POINTER_TYPE_P (TREE_TYPE (ptr)))
7027
            find_what_p_points_to (ptr);
7028
        }
7029
 
7030
      /* Compute the call-use and call-clobber sets for all direct calls.  */
7031
      fi = lookup_vi_for_tree (node->decl);
7032
      gcc_assert (fi->is_fn_info);
7033
      find_what_var_points_to (first_vi_for_offset (fi, fi_clobbers),
7034
                               &clobbers);
7035
      find_what_var_points_to (first_vi_for_offset (fi, fi_uses), &uses);
7036
      for (e = node->callers; e; e = e->next_caller)
7037
        {
7038
          if (!e->call_stmt)
7039
            continue;
7040
 
7041
          *gimple_call_clobber_set (e->call_stmt) = clobbers;
7042
          *gimple_call_use_set (e->call_stmt) = uses;
7043
        }
7044
 
7045
      /* Compute the call-use and call-clobber sets for indirect calls
7046
         and calls to external functions.  */
7047
      FOR_EACH_BB_FN (bb, fn)
7048
        {
7049
          gimple_stmt_iterator gsi;
7050
 
7051
          for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
7052
            {
7053
              gimple stmt = gsi_stmt (gsi);
7054
              struct pt_solution *pt;
7055
              varinfo_t vi;
7056
              tree decl;
7057
 
7058
              if (!is_gimple_call (stmt))
7059
                continue;
7060
 
7061
              /* Handle direct calls to external functions.  */
7062
              decl = gimple_call_fndecl (stmt);
7063
              if (decl
7064
                  && (!(fi = lookup_vi_for_tree (decl))
7065
                      || !fi->is_fn_info))
7066
                {
7067
                  pt = gimple_call_use_set (stmt);
7068
                  if (gimple_call_flags (stmt) & ECF_CONST)
7069
                    memset (pt, 0, sizeof (struct pt_solution));
7070
                  else if ((vi = lookup_call_use_vi (stmt)) != NULL)
7071
                    {
7072
                      find_what_var_points_to (vi, pt);
7073
                      /* Escaped (and thus nonlocal) variables are always
7074
                         implicitly used by calls.  */
7075
                      /* ???  ESCAPED can be empty even though NONLOCAL
7076
                         always escapes.  */
7077
                      pt->nonlocal = 1;
7078
                      pt->ipa_escaped = 1;
7079
                    }
7080
                  else
7081
                    {
7082
                      /* If there is nothing special about this call then
7083
                         we have made everything that is used also escape.  */
7084
                      *pt = ipa_escaped_pt;
7085
                      pt->nonlocal = 1;
7086
                    }
7087
 
7088
                  pt = gimple_call_clobber_set (stmt);
7089
                  if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
7090
                    memset (pt, 0, sizeof (struct pt_solution));
7091
                  else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
7092
                    {
7093
                      find_what_var_points_to (vi, pt);
7094
                      /* Escaped (and thus nonlocal) variables are always
7095
                         implicitly clobbered by calls.  */
7096
                      /* ???  ESCAPED can be empty even though NONLOCAL
7097
                         always escapes.  */
7098
                      pt->nonlocal = 1;
7099
                      pt->ipa_escaped = 1;
7100
                    }
7101
                  else
7102
                    {
7103
                      /* If there is nothing special about this call then
7104
                         we have made everything that is used also escape.  */
7105
                      *pt = ipa_escaped_pt;
7106
                      pt->nonlocal = 1;
7107
                    }
7108
                }
7109
 
7110
              /* Handle indirect calls.  */
7111
              if (!decl
7112
                  && (fi = get_fi_for_callee (stmt)))
7113
                {
7114
                  /* We need to accumulate all clobbers/uses of all possible
7115
                     callees.  */
7116
                  fi = get_varinfo (find (fi->id));
7117
                  /* If we cannot constrain the set of functions we'll end up
7118
                      calling, we end up using/clobbering everything.  */
7119
                  if (bitmap_bit_p (fi->solution, anything_id)
7120
                      || bitmap_bit_p (fi->solution, nonlocal_id)
7121
                      || bitmap_bit_p (fi->solution, escaped_id))
7122
                    {
7123
                      pt_solution_reset (gimple_call_clobber_set (stmt));
7124
                      pt_solution_reset (gimple_call_use_set (stmt));
7125
                    }
7126
                  else
7127
                    {
7128
                      bitmap_iterator bi;
7129
                      unsigned i;
7130
                      struct pt_solution *uses, *clobbers;
7131
 
7132
                      uses = gimple_call_use_set (stmt);
7133
                      clobbers = gimple_call_clobber_set (stmt);
7134
                      memset (uses, 0, sizeof (struct pt_solution));
7135
                      memset (clobbers, 0, sizeof (struct pt_solution));
7136
                      EXECUTE_IF_SET_IN_BITMAP (fi->solution, 0, i, bi)
7137
                        {
7138
                          struct pt_solution sol;
7139
 
7140
                          vi = get_varinfo (i);
7141
                          if (!vi->is_fn_info)
7142
                            {
7143
                              /* ???  We could be more precise here?  */
7144
                              uses->nonlocal = 1;
7145
                              uses->ipa_escaped = 1;
7146
                              clobbers->nonlocal = 1;
7147
                              clobbers->ipa_escaped = 1;
7148
                              continue;
7149
                            }
7150
 
7151
                          if (!uses->anything)
7152
                            {
7153
                              find_what_var_points_to
7154
                                  (first_vi_for_offset (vi, fi_uses), &sol);
7155
                              pt_solution_ior_into (uses, &sol);
7156
                            }
7157
                          if (!clobbers->anything)
7158
                            {
7159
                              find_what_var_points_to
7160
                                  (first_vi_for_offset (vi, fi_clobbers), &sol);
7161
                              pt_solution_ior_into (clobbers, &sol);
7162
                            }
7163
                        }
7164
                    }
7165
                }
7166
            }
7167
        }
7168
 
7169
      fn->gimple_df->ipa_pta = true;
7170
    }
7171
 
7172
  delete_points_to_sets ();
7173
 
7174
  in_ipa_mode = 0;
7175
 
7176
  return 0;
7177
}
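
/* Usage note (sketch): the whole-program variant above only runs when
   requested explicitly, e.g.

       gcc -O2 -fipa-pta foo.c bar.c

   since flag_ipa_pta, checked in gate_ipa_pta, is off by default.  Each
   function's gimple_df->ipa_pta flag, set at the end of ipa_pta_execute,
   then makes compute_may_aliases skip the local recomputation.  */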
7178
 
7179
struct simple_ipa_opt_pass pass_ipa_pta =
7180
{
7181
 {
7182
  SIMPLE_IPA_PASS,
7183
  "pta",                                /* name */
7184
  gate_ipa_pta,                 /* gate */
7185
  ipa_pta_execute,                      /* execute */
7186
  NULL,                                 /* sub */
7187
  NULL,                                 /* next */
7188
  0,                                     /* static_pass_number */
7189
  TV_IPA_PTA,                   /* tv_id */
7190
  0,                                     /* properties_required */
7191
  0,                                     /* properties_provided */
7192
  0,                                     /* properties_destroyed */
7193
  0,                                     /* todo_flags_start */
7194
  TODO_update_ssa                       /* todo_flags_finish */
7195
 }
7196
};
