openrisc/trunk/gnu-src/gcc-4.5.1/gcc/tree-sra.c - Blame information for rev 378


Line No. Rev Author Line
1 280 jeremybenn
/* Scalar Replacement of Aggregates (SRA) converts some structure
2
   references into scalar references, exposing them to the scalar
3
   optimizers.
4
   Copyright (C) 2008, 2009, 2010 Free Software Foundation, Inc.
5
   Contributed by Martin Jambor <mjambor@suse.cz>
6
 
7
This file is part of GCC.
8
 
9
GCC is free software; you can redistribute it and/or modify it under
10
the terms of the GNU General Public License as published by the Free
11
Software Foundation; either version 3, or (at your option) any later
12
version.
13
 
14
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15
WARRANTY; without even the implied warranty of MERCHANTABILITY or
16
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
17
for more details.
18
 
19
You should have received a copy of the GNU General Public License
20
along with GCC; see the file COPYING3.  If not see
21
<http://www.gnu.org/licenses/>.  */
22
 
23
/* This file implements Scalar Replacement of Aggregates (SRA).  SRA is run
24
   twice, once in the early stages of compilation (early SRA) and once in the
25
   late stages (late SRA).  The aim of both is to turn references to scalar
26
   parts of aggregates into uses of independent scalar variables.
27
 
28
   The two passes are nearly identical; the only difference is that early SRA
29
   does not scalarize unions which are used as the result in a GIMPLE_RETURN
30
   statement because together with inlining this can lead to weird type
31
   conversions.
32
 
33
   Both passes operate in four stages:
34
 
35
   1. The declarations that have properties which make them candidates for
36
      scalarization are identified in function find_var_candidates().  The
37
      candidates are stored in candidate_bitmap.
38
 
39
   2. The function body is scanned.  In the process, declarations which are
40
      used in a manner that prevents their scalarization are removed from the
41
      candidate bitmap.  More importantly, for every access into an aggregate,
42
      an access structure (struct access) is created by create_access() and
43
      stored in a vector associated with the aggregate.  Among other
44
      information, the aggregate declaration, the offset and size of the access
45
      and its type are stored in the structure.
46
 
47
      On a related note, assign_link structures are created for every assign
48
      statement between candidate aggregates and attached to the related
49
      accesses.
50
 
51
   3. The vectors of accesses are analyzed.  They are first sorted according to
52
      their offset and size and then scanned for partially overlapping accesses
53
      (i.e. those which overlap but one is not entirely within another).  Such
54
      an access disqualifies the whole aggregate from being scalarized.
55
 
56
      If there is no such inhibiting overlap, a representative access structure
57
      is chosen for every unique combination of offset and size.  Afterwards,
58
      the pass builds a set of trees from these structures, in which children
59
      of an access are within their parent (in terms of offset and size).
60
 
61
      Then accesses are propagated whenever possible (i.e. in cases when it
62
      does not create a partially overlapping access) across assign_links from
63
      the right hand side to the left hand side.
64
 
65
      Then the set of trees for each declaration is traversed again and those
66
      accesses which should be replaced by a scalar are identified.
67
 
68
   4. The function is traversed again, and for every reference into an
69
      aggregate that has some component which is about to be scalarized,
70
      statements are amended and new statements are created as necessary.
71
      Finally, if a parameter got scalarized, the scalar replacements are
72
      initialized with values from respective parameter aggregates.  */
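
/* As a rough illustration (the names below are made up and do not come from
   any particular testcase), intraprocedural SRA turns

     struct S { int i; float f; } s;
     s.i = x;
     use (s.i);

   into something along the lines of

     int SR_s_i;
     SR_s_i = x;
     use (SR_s_i);

   so that the former aggregate member can live in a register and be seen by
   the scalar optimizers.  */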
73
 
74
#include "config.h"
75
#include "system.h"
76
#include "coretypes.h"
77
#include "alloc-pool.h"
78
#include "tm.h"
79
#include "tree.h"
80
#include "expr.h"
81
#include "gimple.h"
82
#include "cgraph.h"
83
#include "tree-flow.h"
84
#include "ipa-prop.h"
85
#include "diagnostic.h"
86
#include "statistics.h"
87
#include "tree-dump.h"
88
#include "timevar.h"
89
#include "params.h"
90
#include "target.h"
91
#include "flags.h"
92
#include "tree-inline.h"
93
 
94
/* Enumeration of all aggregate reductions we can do.  */
95
enum sra_mode { SRA_MODE_EARLY_IPA,   /* early call regularization */
96
                SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
97
                SRA_MODE_INTRA };     /* late intraprocedural SRA */
98
 
99
/* Global variable describing which aggregate reduction we are performing at
100
   the moment.  */
101
static enum sra_mode sra_mode;
102
 
103
struct assign_link;
104
 
105
/* ACCESS represents each access to an aggregate variable (as a whole or a
106
   part).  It can also represent a group of accesses that refer to exactly the
107
   same fragment of an aggregate (i.e. those that have exactly the same offset
108
   and size).  Such representatives for a single aggregate, once determined,
109
   are linked in a linked list and have the group fields set.
110
 
111
   Moreover, when doing intraprocedural SRA, a tree is built from those
112
   representatives (by the means of first_child and next_sibling pointers), in
113
   which all items in a subtree are "within" the root, i.e. their offset is
114
   greater or equal to offset of the root and offset+size is smaller or equal
115
   to offset+size of the root.  Children of an access are sorted by offset.
116
 
117
   Note that accesses to parts of vector and complex number types are always
118
   represented by an access to the whole complex number or a vector.  It is a
119
   duty of the modifying functions to replace them appropriately.  */
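
/* A small sketch (assuming 32-bit ints; offsets and sizes are in bits): for

     struct S { struct { int a; int b; } t; int c; } s;

   which is assigned as a whole and whose members s.t.a and s.c are also
   accessed individually, the representatives may be linked via first_child
   and next_sibling into the tree

     s          offset 0,  size 96
     +-- s.t.a  offset 0,  size 32
     +-- s.c    offset 64, size 32

   with siblings ordered by offset and every child lying within its
   parent.  */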
120
 
121
struct access
122
{
123
  /* Values returned by `get_ref_base_and_extent' for each component reference.
124
     If EXPR isn't a component reference, just set `BASE = EXPR', `OFFSET = 0',
125
     `SIZE = TREE_SIZE (TREE_TYPE (expr))'.  */
126
  HOST_WIDE_INT offset;
127
  HOST_WIDE_INT size;
128
  tree base;
129
 
130
  /* Expression.  It is context dependent so do not use it to create new
131
     expressions to access the original aggregate.  See PR 42154 for a
132
     testcase.  */
133
  tree expr;
134
  /* Type.  */
135
  tree type;
136
 
137
  /* The statement this access belongs to.  */
138
  gimple stmt;
139
 
140
  /* Next group representative for this aggregate. */
141
  struct access *next_grp;
142
 
143
  /* Pointer to the group representative.  Pointer to itself if the struct is
144
     the representative.  */
145
  struct access *group_representative;
146
 
147
  /* If this access has any children (in terms of the definition above), this
148
     points to the first one.  */
149
  struct access *first_child;
150
 
151
  /* In intraprocedural SRA, pointer to the next sibling in the access tree as
152
     described above.  In IPA-SRA this is a pointer to the next access
153
     belonging to the same group (having the same representative).  */
154
  struct access *next_sibling;
155
 
156
  /* Pointers to the first and last element in the linked list of assign
157
     links.  */
158
  struct assign_link *first_link, *last_link;
159
 
160
  /* Pointer to the next access in the work queue.  */
161
  struct access *next_queued;
162
 
163
  /* Replacement variable for this access "region."  Never to be accessed
164
     directly, always only by the means of get_access_replacement() and only
165
     when grp_to_be_replaced flag is set.  */
166
  tree replacement_decl;
167
 
168
  /* Is this particular access a write access? */
169
  unsigned write : 1;
170
 
171
  /* Is this access an artificial one created to scalarize some record
172
     entirely? */
173
  unsigned total_scalarization : 1;
174
 
175
  /* Is this access currently in the work queue?  */
176
  unsigned grp_queued : 1;
177
 
178
  /* Does this group contain a write access?  This flag is propagated down the
179
     access tree.  */
180
  unsigned grp_write : 1;
181
 
182
  /* Does this group contain a read access?  This flag is propagated down the
183
     access tree.  */
184
  unsigned grp_read : 1;
185
 
186
  /* Does this group contain a read access that comes from an assignment
187
     statement?  This flag is propagated down the access tree.  */
188
  unsigned grp_assignment_read : 1;
189
 
190
  /* Other passes of the analysis use this bit to make function
191
     analyze_access_subtree create scalar replacements for this group if
192
     possible.  */
193
  unsigned grp_hint : 1;
194
 
195
  /* Is the subtree rooted in this access fully covered by scalar
196
     replacements?  */
197
  unsigned grp_covered : 1;
198
 
199
  /* If set to true, this access and all below it in an access tree must not be
200
     scalarized.  */
201
  unsigned grp_unscalarizable_region : 1;
202
 
203
  /* Whether data have been written to parts of the aggregate covered by this
204
     access that are not to be scalarized.  This flag is propagated up in the
205
     access tree.  */
206
  unsigned grp_unscalarized_data : 1;
207
 
208
  /* Does this access and/or group contain a write access through a
209
     BIT_FIELD_REF?  */
210
  unsigned grp_partial_lhs : 1;
211
 
212
  /* Set when a scalar replacement should be created for this variable.  The
213
     decision and creation happen at different places because create_tmp_var
214
     cannot be called from within FOR_EACH_REFERENCED_VAR. */
215
  unsigned grp_to_be_replaced : 1;
216
 
217
  /* Is it possible that the group refers to data which might be (directly or
218
     otherwise) modified?  */
219
  unsigned grp_maybe_modified : 1;
220
 
221
  /* Set when this is a representative of a pointer to scalar (i.e. by
222
     reference) parameter which we consider for turning into a plain scalar
223
     (i.e. a by value parameter).  */
224
  unsigned grp_scalar_ptr : 1;
225
 
226
  /* Set when we discover that this pointer is not safe to dereference in the
227
     caller.  */
228
  unsigned grp_not_necessarilly_dereferenced : 1;
229
};
230
 
231
typedef struct access *access_p;
232
 
233
DEF_VEC_P (access_p);
234
DEF_VEC_ALLOC_P (access_p, heap);
235
 
236
/* Alloc pool for allocating access structures.  */
237
static alloc_pool access_pool;
238
 
239
/* A structure linking lhs and rhs accesses from an aggregate assignment.  They
240
   are used to propagate subaccesses from rhs to lhs as long as they don't
241
   conflict with what is already there.  */
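
/* Informally: for an aggregate assignment "a = b" where b.x already has an
   access structure, the link lets the analysis stage create a matching
   subaccess a.x on the left hand side, provided that doing so does not
   create a partially overlapping access in a.  */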
242
struct assign_link
243
{
244
  struct access *lacc, *racc;
245
  struct assign_link *next;
246
};
247
 
248
/* Alloc pool for allocating assign link structures.  */
249
static alloc_pool link_pool;
250
 
251
/* Base (tree) -> Vector (VEC(access_p,heap) *) map.  */
252
static struct pointer_map_t *base_access_vec;
253
 
254
/* Bitmap of candidates.  */
255
static bitmap candidate_bitmap;
256
 
257
/* Bitmap of candidates which we should try to entirely scalarize away and
258
   those which cannot be (because they are, and must be, used as a whole).  */
259
static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;
260
 
261
/* Obstack for creation of fancy names.  */
262
static struct obstack name_obstack;
263
 
264
/* Head of a linked list of accesses that need to have their subaccesses
265
   propagated to their assignment counterparts. */
266
static struct access *work_queue_head;
267
 
268
/* Number of parameters of the analyzed function when doing early ipa SRA.  */
269
static int func_param_count;
270
 
271
/* scan_function sets the following to true if it encounters a call to
272
   __builtin_apply_args.  */
273
static bool encountered_apply_args;
274
 
275
/* Set by scan_function when it finds a recursive call with fewer actual
276
   arguments than formal parameters.  */
277
static bool encountered_unchangable_recursive_call;
278
 
279 378 julius
/* Set by scan_function when it changes the control flow graph.  */
280
static bool cfg_changed;
281
 
282 280 jeremybenn
/* This is a table in which for each basic block and parameter there is a
283
   distance (offset + size) in that parameter which is dereferenced and
284
   accessed in that BB.  */
285
static HOST_WIDE_INT *bb_dereferences;
286
/* Bitmap of BBs that can cause the function to "stop" progressing by
287
   returning, throwing externally, looping infinitely or calling a function
288
   which might abort, etc.  */
289
static bitmap final_bbs;
290
 
291
/* Representative of no accesses at all. */
292
static struct access  no_accesses_representant;
293
 
294
/* Predicate to test the special value.  */
295
 
296
static inline bool
297
no_accesses_p (struct access *access)
298
{
299
  return access == &no_accesses_representant;
300
}
301
 
302
/* Dump contents of ACCESS to file F in a human friendly way.  If GRP is true,
303
   representative fields are dumped, otherwise those which only describe the
304
   individual access are.  */
305
 
306
static struct
307
{
308
  /* Number of processed aggregates is readily available in
309
     analyze_all_variable_accesses and so is not stored here.  */
310
 
311
  /* Number of created scalar replacements.  */
312
  int replacements;
313
 
314
  /* Number of times sra_modify_expr or sra_modify_assign themselves changed an
315
     expression.  */
316
  int exprs;
317
 
318
  /* Number of statements created by generate_subtree_copies.  */
319
  int subtree_copies;
320
 
321
  /* Number of statements created by load_assign_lhs_subreplacements.  */
322
  int subreplacements;
323
 
324
  /* Number of times sra_modify_assign has deleted a statement.  */
325
  int deleted;
326
 
327
  /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
328
     RHS separately due to type conversions or nonexistent matching
329
     references.  */
330
  int separate_lhs_rhs_handling;
331
 
332
  /* Number of parameters that were removed because they were unused.  */
333
  int deleted_unused_parameters;
334
 
335
  /* Number of scalars passed as parameters by reference that have been
336
     converted to be passed by value.  */
337
  int scalar_by_ref_to_by_val;
338
 
339
  /* Number of aggregate parameters that were replaced by one or more of their
340
     components.  */
341
  int aggregate_params_reduced;
342
 
343
  /* Number of components created when splitting aggregate parameters.  */
344
  int param_reductions_created;
345
} sra_stats;
346
 
347
static void
348
dump_access (FILE *f, struct access *access, bool grp)
349
{
350
  fprintf (f, "access { ");
351
  fprintf (f, "base = (%d)'", DECL_UID (access->base));
352
  print_generic_expr (f, access->base, 0);
353
  fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
354
  fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
355
  fprintf (f, ", expr = ");
356
  print_generic_expr (f, access->expr, 0);
357
  fprintf (f, ", type = ");
358
  print_generic_expr (f, access->type, 0);
359
  if (grp)
360
    fprintf (f, ", grp_write = %d, total_scalarization = %d, "
361
             "grp_read = %d, grp_hint = %d, grp_assignment_read = %d, "
362
             "grp_covered = %d, grp_unscalarizable_region = %d, "
363
             "grp_unscalarized_data = %d, grp_partial_lhs = %d, "
364
             "grp_to_be_replaced = %d, grp_maybe_modified = %d, "
365
             "grp_not_necessarilly_dereferenced = %d\n",
366
             access->grp_write, access->total_scalarization,
367
             access->grp_read, access->grp_hint, access->grp_assignment_read,
368
             access->grp_covered, access->grp_unscalarizable_region,
369
             access->grp_unscalarized_data, access->grp_partial_lhs,
370
             access->grp_to_be_replaced, access->grp_maybe_modified,
371
             access->grp_not_necessarilly_dereferenced);
372
  else
373
    fprintf (f, ", write = %d, total_scalarization = %d, "
374
             "grp_partial_lhs = %d\n",
375
             access->write, access->total_scalarization,
376
             access->grp_partial_lhs);
377
}
378
 
379
/* Dump a subtree rooted in ACCESS to file F, indent by LEVEL.  */
380
 
381
static void
382
dump_access_tree_1 (FILE *f, struct access *access, int level)
383
{
384
  do
385
    {
386
      int i;
387
 
388
      for (i = 0; i < level; i++)
389
        fputs ("* ", f);
390
 
391
      dump_access (f, access, true);
392
 
393
      if (access->first_child)
394
        dump_access_tree_1 (f, access->first_child, level + 1);
395
 
396
      access = access->next_sibling;
397
    }
398
  while (access);
399
}
400
 
401
/* Dump all access trees for a variable, given the pointer to the first root in
402
   ACCESS.  */
403
 
404
static void
405
dump_access_tree (FILE *f, struct access *access)
406
{
407
  for (; access; access = access->next_grp)
408
    dump_access_tree_1 (f, access, 0);
409
}
410
 
411
/* Return true iff ACC is non-NULL and has subaccesses.  */
412
 
413
static inline bool
414
access_has_children_p (struct access *acc)
415
{
416
  return acc && acc->first_child;
417
}
418
 
419
/* Return a vector of pointers to accesses for the variable given in BASE or
420
   NULL if there is none.  */
421
 
422
static VEC (access_p, heap) *
423
get_base_access_vector (tree base)
424
{
425
  void **slot;
426
 
427
  slot = pointer_map_contains (base_access_vec, base);
428
  if (!slot)
429
    return NULL;
430
  else
431
    return *(VEC (access_p, heap) **) slot;
432
}
433
 
434
/* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
435
   in ACCESS.  Return NULL if it cannot be found.  */
436
 
437
static struct access *
438
find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
439
                        HOST_WIDE_INT size)
440
{
441
  while (access && (access->offset != offset || access->size != size))
442
    {
443
      struct access *child = access->first_child;
444
 
445
      while (child && (child->offset + child->size <= offset))
446
        child = child->next_sibling;
447
      access = child;
448
    }
449
 
450
  return access;
451
}
452
 
453
/* Return the first group representative for BASE or NULL if none exists.  */
454
 
455
static struct access *
456
get_first_repr_for_decl (tree base)
457
{
458
  VEC (access_p, heap) *access_vec;
459
 
460
  access_vec = get_base_access_vector (base);
461
  if (!access_vec)
462
    return NULL;
463
 
464
  return VEC_index (access_p, access_vec, 0);
465
}
466
 
467
/* Find an access representative for the variable BASE and given OFFSET and
468
   SIZE.  Requires that access trees have already been built.  Return NULL if
469
   it cannot be found.  */
470
 
471
static struct access *
472
get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
473
                                 HOST_WIDE_INT size)
474
{
475
  struct access *access;
476
 
477
  access = get_first_repr_for_decl (base);
478
  while (access && (access->offset + access->size <= offset))
479
    access = access->next_grp;
480
  if (!access)
481
    return NULL;
482
 
483
  return find_access_in_subtree (access, offset, size);
484
}
485
 
486
/* Add LINK to the linked list of assign links of RACC.  */
487
static void
488
add_link_to_rhs (struct access *racc, struct assign_link *link)
489
{
490
  gcc_assert (link->racc == racc);
491
 
492
  if (!racc->first_link)
493
    {
494
      gcc_assert (!racc->last_link);
495
      racc->first_link = link;
496
    }
497
  else
498
    racc->last_link->next = link;
499
 
500
  racc->last_link = link;
501
  link->next = NULL;
502
}
503
 
504
/* Move all link structures in their linked list in OLD_RACC to the linked list
505
   in NEW_RACC.  */
506
static void
507
relink_to_new_repr (struct access *new_racc, struct access *old_racc)
508
{
509
  if (!old_racc->first_link)
510
    {
511
      gcc_assert (!old_racc->last_link);
512
      return;
513
    }
514
 
515
  if (new_racc->first_link)
516
    {
517
      gcc_assert (!new_racc->last_link->next);
518
      gcc_assert (!old_racc->last_link || !old_racc->last_link->next);
519
 
520
      new_racc->last_link->next = old_racc->first_link;
521
      new_racc->last_link = old_racc->last_link;
522
    }
523
  else
524
    {
525
      gcc_assert (!new_racc->last_link);
526
 
527
      new_racc->first_link = old_racc->first_link;
528
      new_racc->last_link = old_racc->last_link;
529
    }
530
  old_racc->first_link = old_racc->last_link = NULL;
531
}
532
 
533
/* Add ACCESS to the work queue (which is actually a stack).  */
534
 
535
static void
536
add_access_to_work_queue (struct access *access)
537
{
538
  if (!access->grp_queued)
539
    {
540
      gcc_assert (!access->next_queued);
541
      access->next_queued = work_queue_head;
542
      access->grp_queued = 1;
543
      work_queue_head = access;
544
    }
545
}
546
 
547
/* Pop an access from the work queue, and return it, assuming there is one.  */
548
 
549
static struct access *
550
pop_access_from_work_queue (void)
551
{
552
  struct access *access = work_queue_head;
553
 
554
  work_queue_head = access->next_queued;
555
  access->next_queued = NULL;
556
  access->grp_queued = 0;
557
  return access;
558
}
559
 
560
 
561
/* Allocate necessary structures.  */
562
 
563
static void
564
sra_initialize (void)
565
{
566
  candidate_bitmap = BITMAP_ALLOC (NULL);
567
  should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
568
  cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
569
  gcc_obstack_init (&name_obstack);
570
  access_pool = create_alloc_pool ("SRA accesses", sizeof (struct access), 16);
571
  link_pool = create_alloc_pool ("SRA links", sizeof (struct assign_link), 16);
572
  base_access_vec = pointer_map_create ();
573
  memset (&sra_stats, 0, sizeof (sra_stats));
574
  encountered_apply_args = false;
575
  encountered_unchangable_recursive_call = false;
576 378 julius
  cfg_changed = false;
577 280 jeremybenn
}
578
 
579
/* Hook fed to pointer_map_traverse, deallocate stored vectors.  */
580
 
581
static bool
582
delete_base_accesses (const void *key ATTRIBUTE_UNUSED, void **value,
583
                     void *data ATTRIBUTE_UNUSED)
584
{
585
  VEC (access_p, heap) *access_vec;
586
  access_vec = (VEC (access_p, heap) *) *value;
587
  VEC_free (access_p, heap, access_vec);
588
 
589
  return true;
590
}
591
 
592
/* Deallocate all general structures.  */
593
 
594
static void
595
sra_deinitialize (void)
596
{
597
  BITMAP_FREE (candidate_bitmap);
598
  BITMAP_FREE (should_scalarize_away_bitmap);
599
  BITMAP_FREE (cannot_scalarize_away_bitmap);
600
  free_alloc_pool (access_pool);
601
  free_alloc_pool (link_pool);
602
  obstack_free (&name_obstack, NULL);
603
 
604
  pointer_map_traverse (base_access_vec, delete_base_accesses, NULL);
605
  pointer_map_destroy (base_access_vec);
606
}
607
 
608
/* Remove DECL from candidates for SRA and write REASON to the dump file if
609
   there is one.  */
610
static void
611
disqualify_candidate (tree decl, const char *reason)
612
{
613
  bitmap_clear_bit (candidate_bitmap, DECL_UID (decl));
614
 
615
  if (dump_file && (dump_flags & TDF_DETAILS))
616
    {
617
      fprintf (dump_file, "! Disqualifying ");
618
      print_generic_expr (dump_file, decl, 0);
619
      fprintf (dump_file, " - %s\n", reason);
620
    }
621
}
622
 
623
/* Return true iff the type contains a field or an element which does not allow
624
   scalarization.  */
625
 
626
static bool
627
type_internals_preclude_sra_p (tree type)
628
{
629
  tree fld;
630
  tree et;
631
 
632
  switch (TREE_CODE (type))
633
    {
634
    case RECORD_TYPE:
635
    case UNION_TYPE:
636
    case QUAL_UNION_TYPE:
637
      for (fld = TYPE_FIELDS (type); fld; fld = TREE_CHAIN (fld))
638
        if (TREE_CODE (fld) == FIELD_DECL)
639
          {
640
            tree ft = TREE_TYPE (fld);
641
 
642
            if (TREE_THIS_VOLATILE (fld)
643
                || !DECL_FIELD_OFFSET (fld) || !DECL_SIZE (fld)
644
                || !host_integerp (DECL_FIELD_OFFSET (fld), 1)
645
                || !host_integerp (DECL_SIZE (fld), 1))
646
              return true;
647
 
648
            if (AGGREGATE_TYPE_P (ft)
649
                && type_internals_preclude_sra_p (ft))
650
              return true;
651
          }
652
 
653
      return false;
654
 
655
    case ARRAY_TYPE:
656
      et = TREE_TYPE (type);
657
 
658
      if (AGGREGATE_TYPE_P (et))
659
        return type_internals_preclude_sra_p (et);
660
      else
661
        return false;
662
 
663
    default:
664
      return false;
665
    }
666
}
667
 
668
/* If T is an SSA_NAME, return NULL if it is not a default def or return its
669
   base variable if it is.  Return T if it is not an SSA_NAME.  */
670
 
671
static tree
672
get_ssa_base_param (tree t)
673
{
674
  if (TREE_CODE (t) == SSA_NAME)
675
    {
676
      if (SSA_NAME_IS_DEFAULT_DEF (t))
677
        return SSA_NAME_VAR (t);
678
      else
679
        return NULL_TREE;
680
    }
681
  return t;
682
}
683
 
684
/* Mark a dereference of BASE of distance DIST in the basic block that STMT
685
   belongs to, unless the BB has already been marked as potentially
686
   final.  */
687
 
688
static void
689
mark_parm_dereference (tree base, HOST_WIDE_INT dist, gimple stmt)
690
{
691
  basic_block bb = gimple_bb (stmt);
692
  int idx, parm_index = 0;
693
  tree parm;
694
 
695
  if (bitmap_bit_p (final_bbs, bb->index))
696
    return;
697
 
698
  for (parm = DECL_ARGUMENTS (current_function_decl);
699
       parm && parm != base;
700
       parm = TREE_CHAIN (parm))
701
    parm_index++;
702
 
703
  gcc_assert (parm_index < func_param_count);
704
 
705
  idx = bb->index * func_param_count + parm_index;
706
  if (bb_dereferences[idx] < dist)
707
    bb_dereferences[idx] = dist;
708
}
709
 
710
/* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
711
   the three fields.  Also add it to the vector of accesses corresponding to
712
   the base.  Finally, return the new access.  */
713
 
714
static struct access *
715
create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
716
{
717
  VEC (access_p, heap) *vec;
718
  struct access *access;
719
  void **slot;
720
 
721
  access = (struct access *) pool_alloc (access_pool);
722
  memset (access, 0, sizeof (struct access));
723
  access->base = base;
724
  access->offset = offset;
725
  access->size = size;
726
 
727
  slot = pointer_map_contains (base_access_vec, base);
728
  if (slot)
729
    vec = (VEC (access_p, heap) *) *slot;
730
  else
731
    vec = VEC_alloc (access_p, heap, 32);
732
 
733
  VEC_safe_push (access_p, heap, vec, access);
734
 
735
  *((struct VEC (access_p,heap) **)
736
        pointer_map_insert (base_access_vec, base)) = vec;
737
 
738
  return access;
739
}
740
 
741
/* Create and insert access for EXPR.  Return the new access, or NULL if it is
742
   not possible.  */
743
 
744
static struct access *
745
create_access (tree expr, gimple stmt, bool write)
746
{
747
  struct access *access;
748
  HOST_WIDE_INT offset, size, max_size;
749
  tree base = expr;
750
  bool ptr, unscalarizable_region = false;
751
 
752
  base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
753
 
754
  if (sra_mode == SRA_MODE_EARLY_IPA && INDIRECT_REF_P (base))
755
    {
756
      base = get_ssa_base_param (TREE_OPERAND (base, 0));
757
      if (!base)
758
        return NULL;
759
      ptr = true;
760
    }
761
  else
762
    ptr = false;
763
 
764
  if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
765
    return NULL;
766
 
767
  if (sra_mode == SRA_MODE_EARLY_IPA)
768
    {
769
      if (size < 0 || size != max_size)
770
        {
771
          disqualify_candidate (base, "Encountered a variable sized access.");
772
          return NULL;
773
        }
774
      if ((offset % BITS_PER_UNIT) != 0 || (size % BITS_PER_UNIT) != 0)
775
        {
776
          disqualify_candidate (base,
777
                                "Encountered an access not aligned to a byte.");
778
          return NULL;
779
        }
780
 
781
      if (ptr)
782
        mark_parm_dereference (base, offset + size, stmt);
783
    }
784
  else
785
    {
786
      if (size != max_size)
787
        {
788
          size = max_size;
789
          unscalarizable_region = true;
790
        }
791
      if (size < 0)
792
        {
793
          disqualify_candidate (base, "Encountered an unconstrained access.");
794
          return NULL;
795
        }
796
    }
797
 
798
  access = create_access_1 (base, offset, size);
799
  access->expr = expr;
800
  access->type = TREE_TYPE (expr);
801
  access->write = write;
802
  access->grp_unscalarizable_region = unscalarizable_region;
803
  access->stmt = stmt;
804
 
805
  return access;
806
}
807
 
808
 
809
/* Return true iff TYPE is a RECORD_TYPE with fields that are either of gimple
810
   register types or (recursively) records with only these two kinds of fields.
811
   It also returns false if any of these records has a zero-size field as its
812
   last field.  */
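
/* Illustrative examples only: struct A { int i; struct { float f; } n; }
   satisfies the predicate, whereas struct B { int a[4]; } does not, because
   the array member is neither a gimple register type nor a record.  */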
813
 
814
static bool
815
type_consists_of_records_p (tree type)
816
{
817
  tree fld;
818
  bool last_fld_has_zero_size = false;
819
 
820
  if (TREE_CODE (type) != RECORD_TYPE)
821
    return false;
822
 
823
  for (fld = TYPE_FIELDS (type); fld; fld = TREE_CHAIN (fld))
824
    if (TREE_CODE (fld) == FIELD_DECL)
825
      {
826
        tree ft = TREE_TYPE (fld);
827
 
828
        if (!is_gimple_reg_type (ft)
829
            && !type_consists_of_records_p (ft))
830
          return false;
831
 
832
        last_fld_has_zero_size = tree_low_cst (DECL_SIZE (fld), 1) == 0;
833
      }
834
 
835
  if (last_fld_has_zero_size)
836
    return false;
837
 
838
  return true;
839
}
840
 
841
/* Create total_scalarization accesses for all scalar type fields in DECL, which
842
   must be of a RECORD_TYPE conforming to type_consists_of_records_p.  BASE
843
   must be the top-most VAR_DECL representing the variable, OFFSET must be the
844
   offset of DECL within BASE.  */
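
/* Sketch: for a variable v of type struct { int a; struct { int b; } in; },
   this creates total_scalarization accesses for v.a and v.in.b (the latter
   found by recursing into the nested record), each at its bit position
   within v.  */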
845
 
846
static void
847
completely_scalarize_record (tree base, tree decl, HOST_WIDE_INT offset)
848
{
849
  tree fld, decl_type = TREE_TYPE (decl);
850
 
851
  for (fld = TYPE_FIELDS (decl_type); fld; fld = TREE_CHAIN (fld))
852
    if (TREE_CODE (fld) == FIELD_DECL)
853
      {
854
        HOST_WIDE_INT pos = offset + int_bit_position (fld);
855
        tree ft = TREE_TYPE (fld);
856
 
857
        if (is_gimple_reg_type (ft))
858
          {
859
            struct access *access;
860
            HOST_WIDE_INT size;
861
            tree expr;
862
            bool ok;
863
 
864
            size = tree_low_cst (DECL_SIZE (fld), 1);
865
            expr = base;
866
            ok = build_ref_for_offset (&expr, TREE_TYPE (base), pos,
867
                                       ft, false);
868
            gcc_assert (ok);
869
 
870
            access = create_access_1 (base, pos, size);
871
            access->expr = expr;
872
            access->type = ft;
873
            access->total_scalarization = 1;
874
            /* Accesses for intraprocedural SRA can have their stmt NULL.  */
875
          }
876
        else
877
          completely_scalarize_record (base, fld, pos);
878
      }
879
}
880
 
881
 
882
/* Search the given tree for a declaration by skipping handled components and
883
   exclude it from the candidates.  */
884
 
885
static void
886
disqualify_base_of_expr (tree t, const char *reason)
887
{
888
  while (handled_component_p (t))
889
    t = TREE_OPERAND (t, 0);
890
 
891
  if (sra_mode == SRA_MODE_EARLY_IPA)
892
    {
893
      if (INDIRECT_REF_P (t))
894
        t = TREE_OPERAND (t, 0);
895
      t = get_ssa_base_param (t);
896
    }
897
 
898
  if (t && DECL_P (t))
899
    disqualify_candidate (t, reason);
900
}
901
 
902
/* Scan expression EXPR and create access structures for all accesses to
903
   candidates for scalarization.  Return the created access or NULL if none is
904
   created.  */
905
 
906
static struct access *
907
build_access_from_expr_1 (tree *expr_ptr, gimple stmt, bool write)
908
{
909
  struct access *ret = NULL;
910
  tree expr = *expr_ptr;
911
  bool partial_ref;
912
 
913
  if (TREE_CODE (expr) == BIT_FIELD_REF
914
      || TREE_CODE (expr) == IMAGPART_EXPR
915
      || TREE_CODE (expr) == REALPART_EXPR)
916
    {
917
      expr = TREE_OPERAND (expr, 0);
918
      partial_ref = true;
919
    }
920
  else
921
    partial_ref = false;
922
 
923
  /* We need to dive through V_C_Es in order to get the size of their operand
924
     and not the result type.  Ada produces such statements.  We are also
925
     capable of handling the topmost V_C_E but not any of those buried in other
926
     handled components.  */
927
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
928
    expr = TREE_OPERAND (expr, 0);
929
 
930
  if (contains_view_convert_expr_p (expr))
931
    {
932
      disqualify_base_of_expr (expr, "V_C_E under a different handled "
933
                               "component.");
934
      return NULL;
935
    }
936
 
937
  switch (TREE_CODE (expr))
938
    {
939
    case INDIRECT_REF:
940
      if (sra_mode != SRA_MODE_EARLY_IPA)
941
        return NULL;
942
      /* fall through */
943
    case VAR_DECL:
944
    case PARM_DECL:
945
    case RESULT_DECL:
946
    case COMPONENT_REF:
947
    case ARRAY_REF:
948
    case ARRAY_RANGE_REF:
949
      ret = create_access (expr, stmt, write);
950
      break;
951
 
952
    default:
953
      break;
954
    }
955
 
956
  if (write && partial_ref && ret)
957
    ret->grp_partial_lhs = 1;
958
 
959
  return ret;
960
}
961
 
962
/* Callback of scan_function.  Scan expression EXPR and create access
963
   structures for all accesses to candidates for scalarization.  Return true if
964
   any access has been inserted.  */
965
 
966
static bool
967
build_access_from_expr (tree *expr_ptr,
968
                        gimple_stmt_iterator *gsi ATTRIBUTE_UNUSED, bool write,
969
                        void *data ATTRIBUTE_UNUSED)
970
{
971
  struct access *access;
972
 
973
  access = build_access_from_expr_1 (expr_ptr, gsi_stmt (*gsi), write);
974
  if (access)
975
    {
976
      /* This means the aggregate is accessed as a whole in a way other than an
977
         assign statement and thus cannot be removed even if we had a scalar
978
         replacement for everything.  */
979
      if (cannot_scalarize_away_bitmap)
980
        bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
981
      return true;
982
    }
983
  return false;
984
}
985
 
986
/* Disqualify LHS and RHS for scalarization if STMT must end its basic block in
987
   modes in which it matters; return true iff they have been disqualified.  RHS
988
   may be NULL, in that case ignore it.  If we scalarize an aggregate in
989
   intra-SRA we may need to add statements after each statement.  This is not
990
   possible if a statement unconditionally has to end the basic block.  */
991
static bool
992
disqualify_ops_if_throwing_stmt (gimple stmt, tree lhs, tree rhs)
993
{
994
  if ((sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
995
      && (stmt_can_throw_internal (stmt) || stmt_ends_bb_p (stmt)))
996
    {
997
      disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
998
      if (rhs)
999
        disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
1000
      return true;
1001
    }
1002
  return false;
1003
}
1004
 
1005
 
1006
/* Result code for scan_assign callback for scan_function.  */
1007
enum scan_assign_result { SRA_SA_NONE,       /* nothing done for the stmt */
1008
                          SRA_SA_PROCESSED,  /* stmt analyzed/changed */
1009
                          SRA_SA_REMOVED };  /* stmt redundant and eliminated */
1010
 
1011
 
1012
/* Callback of scan_function.  Scan expressions occurring in the statement
1013
   pointed to by STMT_EXPR, create access structures for all accesses to
1014
   candidates for scalarization and remove those candidates which occur in
1015
   statements or expressions that prevent them from being split apart.  Return
1016
   true if any access has been inserted.  */
1017
 
1018
static enum scan_assign_result
1019
build_accesses_from_assign (gimple *stmt_ptr,
1020
                            gimple_stmt_iterator *gsi ATTRIBUTE_UNUSED,
1021
                            void *data ATTRIBUTE_UNUSED)
1022
{
1023
  gimple stmt = *stmt_ptr;
1024
  tree *lhs_ptr, *rhs_ptr;
1025
  struct access *lacc, *racc;
1026
 
1027
  if (!gimple_assign_single_p (stmt))
1028
    return SRA_SA_NONE;
1029
 
1030
  lhs_ptr = gimple_assign_lhs_ptr (stmt);
1031
  rhs_ptr = gimple_assign_rhs1_ptr (stmt);
1032
 
1033
  if (disqualify_ops_if_throwing_stmt (stmt, *lhs_ptr, *rhs_ptr))
1034
    return SRA_SA_NONE;
1035
 
1036
  racc = build_access_from_expr_1 (rhs_ptr, stmt, false);
1037
  lacc = build_access_from_expr_1 (lhs_ptr, stmt, true);
1038
 
1039
  if (racc)
1040
    {
1041
      racc->grp_assignment_read = 1;
1042
      if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
1043
          && !is_gimple_reg_type (racc->type))
1044
        bitmap_set_bit (should_scalarize_away_bitmap, DECL_UID (racc->base));
1045
    }
1046
 
1047
  if (lacc && racc
1048
      && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
1049
      && !lacc->grp_unscalarizable_region
1050
      && !racc->grp_unscalarizable_region
1051
      && AGGREGATE_TYPE_P (TREE_TYPE (*lhs_ptr))
1052
      /* FIXME: Turn the following line into an assert after PR 40058 is
1053
         fixed.  */
1054
      && lacc->size == racc->size
1055
      && useless_type_conversion_p (lacc->type, racc->type))
1056
    {
1057
      struct assign_link *link;
1058
 
1059
      link = (struct assign_link *) pool_alloc (link_pool);
1060
      memset (link, 0, sizeof (struct assign_link));
1061
 
1062
      link->lacc = lacc;
1063
      link->racc = racc;
1064
 
1065
      add_link_to_rhs (racc, link);
1066
    }
1067
 
1068
  return (lacc || racc) ? SRA_SA_PROCESSED : SRA_SA_NONE;
1069
}
1070
 
1071
/* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
1072
   GIMPLE_ASM operands with memory constraints which cannot be scalarized.  */
1073
 
1074
static bool
1075
asm_visit_addr (gimple stmt ATTRIBUTE_UNUSED, tree op,
1076
                void *data ATTRIBUTE_UNUSED)
1077
{
1078
  if (DECL_P (op))
1079
    disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");
1080
 
1081
  return false;
1082
}
1083
 
1084
/* Return true iff callsite CALL has at least as many actual arguments as there
1085
   are formal parameters of the function currently processed by IPA-SRA.  */
1086
 
1087
static inline bool
1088
callsite_has_enough_arguments_p (gimple call)
1089
{
1090
  return gimple_call_num_args (call) >= (unsigned) func_param_count;
1091
}
1092
 
1093
/* Scan function and look for interesting statements. Return true if any has
1094
   been found or processed, as indicated by callbacks.  SCAN_EXPR is a callback
1095
   called on all expressions within statements except assign statements and
1096
   those deemed entirely unsuitable for some reason (all operands in such
1097
   statements and expression are removed from candidate_bitmap).  SCAN_ASSIGN
1098
   is a callback called on all assign statements, HANDLE_SSA_DEFS is a callback
1099
   called on assign statements and those call statements which have a lhs, it
1100
   can be NULL.  ANALYSIS_STAGE is true when running in the analysis stage of a
1101
   pass and thus no statement is being modified.  DATA is a pointer passed to
1102
   all callbacks.  If any single callback returns true, this function also
1103
   returns true, otherwise it returns false.  */
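
/* For example, build_access_from_expr and build_accesses_from_assign above
   are written as SCAN_EXPR and SCAN_ASSIGN callbacks respectively; an
   analysis-stage walk combines callbacks of that kind with ANALYSIS_STAGE
   set to true, while the modification stage is expected to run with
   ANALYSIS_STAGE false and callbacks that rewrite the statements.  */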
1104
 
1105
static bool
1106
scan_function (bool (*scan_expr) (tree *, gimple_stmt_iterator *, bool, void *),
1107
               enum scan_assign_result (*scan_assign) (gimple *,
1108
                                                       gimple_stmt_iterator *,
1109
                                                       void *),
1110
               bool (*handle_ssa_defs)(gimple, void *),
1111
               bool analysis_stage, void *data)
1112
{
1113
  gimple_stmt_iterator gsi;
1114
  basic_block bb;
1115
  unsigned i;
1116
  tree *t;
1117
  bool ret = false;
1118
 
1119
  FOR_EACH_BB (bb)
1120
    {
1121
      if (handle_ssa_defs)
1122
        for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1123
          ret |= handle_ssa_defs (gsi_stmt (gsi), data);
1124
 
1125
      gsi = gsi_start_bb (bb);
1126
      while (!gsi_end_p (gsi))
1127
        {
1128
          gimple stmt = gsi_stmt (gsi);
1129
          enum scan_assign_result assign_result;
1130
          bool any = false, deleted = false;
1131
 
1132
          if (analysis_stage && final_bbs && stmt_can_throw_external (stmt))
1133
            bitmap_set_bit (final_bbs, bb->index);
1134
          switch (gimple_code (stmt))
1135
            {
1136
            case GIMPLE_RETURN:
1137
              t = gimple_return_retval_ptr (stmt);
1138
              if (*t != NULL_TREE)
1139
                any |= scan_expr (t, &gsi, false, data);
1140
              if (analysis_stage && final_bbs)
1141
                bitmap_set_bit (final_bbs, bb->index);
1142
              break;
1143
 
1144
            case GIMPLE_ASSIGN:
1145
              assign_result = scan_assign (&stmt, &gsi, data);
1146
              any |= assign_result == SRA_SA_PROCESSED;
1147
              deleted = assign_result == SRA_SA_REMOVED;
1148
              if (handle_ssa_defs && assign_result != SRA_SA_REMOVED)
1149
                any |= handle_ssa_defs (stmt, data);
1150
              break;
1151
 
1152
            case GIMPLE_CALL:
1153
              /* Operands must be processed before the lhs.  */
1154
              for (i = 0; i < gimple_call_num_args (stmt); i++)
1155
                {
1156
                  tree *argp = gimple_call_arg_ptr (stmt, i);
1157
                  any |= scan_expr (argp, &gsi, false, data);
1158
                }
1159
 
1160
              if (analysis_stage && sra_mode == SRA_MODE_EARLY_IPA)
1161
                {
1162
                  tree dest = gimple_call_fndecl (stmt);
1163
                  int flags = gimple_call_flags (stmt);
1164
 
1165
                  if (dest)
1166
                    {
1167
                      if (DECL_BUILT_IN_CLASS (dest) == BUILT_IN_NORMAL
1168
                          && DECL_FUNCTION_CODE (dest) == BUILT_IN_APPLY_ARGS)
1169
                        encountered_apply_args = true;
1170
                      if (cgraph_get_node (dest)
1171
                          == cgraph_get_node (current_function_decl)
1172
                          && !callsite_has_enough_arguments_p (stmt))
1173
                        encountered_unchangable_recursive_call = true;
1174
                    }
1175
 
1176
                  if (final_bbs
1177
                      && (flags & (ECF_CONST | ECF_PURE)) == 0)
1178
                    bitmap_set_bit (final_bbs, bb->index);
1179
                }
1180
 
1181
              if (gimple_call_lhs (stmt))
1182
                {
1183
                  tree *lhs_ptr = gimple_call_lhs_ptr (stmt);
1184
                  if (!analysis_stage
1185
                      || !disqualify_ops_if_throwing_stmt (stmt,
1186
                                                           *lhs_ptr, NULL))
1187
                    {
1188
                      any |= scan_expr (lhs_ptr, &gsi, true, data);
1189
                      if (handle_ssa_defs)
1190
                        any |= handle_ssa_defs (stmt, data);
1191
                    }
1192
                }
1193
              break;
1194
 
1195
            case GIMPLE_ASM:
1196
              if (analysis_stage)
1197
                {
1198
                  walk_stmt_load_store_addr_ops (stmt, NULL, NULL, NULL,
1199
                                                 asm_visit_addr);
1200
                  if (final_bbs)
1201
                    bitmap_set_bit (final_bbs, bb->index);
1202
                }
1203
              for (i = 0; i < gimple_asm_ninputs (stmt); i++)
1204
                {
1205
                  tree *op = &TREE_VALUE (gimple_asm_input_op (stmt, i));
1206
                  any |= scan_expr (op, &gsi, false, data);
1207
                }
1208
              for (i = 0; i < gimple_asm_noutputs (stmt); i++)
1209
                {
1210
                  tree *op = &TREE_VALUE (gimple_asm_output_op (stmt, i));
1211
                  any |= scan_expr (op, &gsi, true, data);
1212
                }
1213
              break;
1214
 
1215
            default:
1216
              break;
1217
            }
1218
 
1219
          if (any)
1220
            {
1221
              ret = true;
1222
 
1223
              if (!analysis_stage)
1224
                {
1225
                  update_stmt (stmt);
1226 378 julius
                  if (maybe_clean_eh_stmt (stmt)
1227
                      && gimple_purge_dead_eh_edges (bb))
1228
                    cfg_changed = true;
1229 280 jeremybenn
                }
1230
            }
1231 378 julius
          if (!deleted)
1232
            gsi_next (&gsi);
1233 280 jeremybenn
        }
1234
    }
1235
 
1236
  return ret;
1237
}
1238
 
1239
/* Helper for qsort.  There are pointers to accesses in the array.  An
1240
   access is considered smaller than another if it has smaller offset or if the
1241
   offsets are the same but its size is bigger.  */
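
/* Informal example: accesses at (offset, size) pairs (0, 64), (32, 32) and
   (0, 32) end up ordered as (0, 64), (0, 32), (32, 32); equal offsets put
   the larger access first so that an enclosing region precedes the accesses
   nested within it.  */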
1242
 
1243
static int
1244
compare_access_positions (const void *a, const void *b)
1245
{
1246
  const access_p *fp1 = (const access_p *) a;
1247
  const access_p *fp2 = (const access_p *) b;
1248
  const access_p f1 = *fp1;
1249
  const access_p f2 = *fp2;
1250
 
1251
  if (f1->offset != f2->offset)
1252
    return f1->offset < f2->offset ? -1 : 1;
1253
 
1254
  if (f1->size == f2->size)
1255
    {
1256
      if (f1->type == f2->type)
1257
        return 0;
1258
      /* Put any non-aggregate type before any aggregate type.  */
1259
      else if (!is_gimple_reg_type (f1->type)
1260
          && is_gimple_reg_type (f2->type))
1261
        return 1;
1262
      else if (is_gimple_reg_type (f1->type)
1263
               && !is_gimple_reg_type (f2->type))
1264
        return -1;
1265
      /* Put any complex or vector type before any other scalar type.  */
1266
      else if (TREE_CODE (f1->type) != COMPLEX_TYPE
1267
               && TREE_CODE (f1->type) != VECTOR_TYPE
1268
               && (TREE_CODE (f2->type) == COMPLEX_TYPE
1269
                   || TREE_CODE (f2->type) == VECTOR_TYPE))
1270
        return 1;
1271
      else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
1272
                || TREE_CODE (f1->type) == VECTOR_TYPE)
1273
               && TREE_CODE (f2->type) != COMPLEX_TYPE
1274
               && TREE_CODE (f2->type) != VECTOR_TYPE)
1275
        return -1;
1276
      /* Put the integral type with the bigger precision first.  */
1277
      else if (INTEGRAL_TYPE_P (f1->type)
1278
               && INTEGRAL_TYPE_P (f2->type))
1279
        return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
1280
      /* Put any integral type with non-full precision last.  */
1281
      else if (INTEGRAL_TYPE_P (f1->type)
1282
               && (TREE_INT_CST_LOW (TYPE_SIZE (f1->type))
1283
                   != TYPE_PRECISION (f1->type)))
1284
        return 1;
1285
      else if (INTEGRAL_TYPE_P (f2->type)
1286
               && (TREE_INT_CST_LOW (TYPE_SIZE (f2->type))
1287
                   != TYPE_PRECISION (f2->type)))
1288
        return -1;
1289
      /* Stabilize the sort.  */
1290
      return TYPE_UID (f1->type) - TYPE_UID (f2->type);
1291
    }
1292
 
1293
  /* We want the bigger accesses first, thus the opposite operator in the next
1294
     line: */
1295
  return f1->size > f2->size ? -1 : 1;
1296
}
1297
 
1298
 
1299
/* Append a name of the declaration to the name obstack.  A helper function for
1300
   make_fancy_name.  */
1301
 
1302
static void
1303
make_fancy_decl_name (tree decl)
1304
{
1305
  char buffer[32];
1306
 
1307
  tree name = DECL_NAME (decl);
1308
  if (name)
1309
    obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
1310
                  IDENTIFIER_LENGTH (name));
1311
  else
1312
    {
1313
      sprintf (buffer, "D%u", DECL_UID (decl));
1314
      obstack_grow (&name_obstack, buffer, strlen (buffer));
1315
    }
1316
}
1317
 
1318
/* Helper for make_fancy_name.  */
1319
 
1320
static void
1321
make_fancy_name_1 (tree expr)
1322
{
1323
  char buffer[32];
1324
  tree index;
1325
 
1326
  if (DECL_P (expr))
1327
    {
1328
      make_fancy_decl_name (expr);
1329
      return;
1330
    }
1331
 
1332
  switch (TREE_CODE (expr))
1333
    {
1334
    case COMPONENT_REF:
1335
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
1336
      obstack_1grow (&name_obstack, '$');
1337
      make_fancy_decl_name (TREE_OPERAND (expr, 1));
1338
      break;
1339
 
1340
    case ARRAY_REF:
1341
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
1342
      obstack_1grow (&name_obstack, '$');
1343
      /* Arrays with only one element may not have a constant as their
1344
         index. */
1345
      index = TREE_OPERAND (expr, 1);
1346
      if (TREE_CODE (index) != INTEGER_CST)
1347
        break;
1348
      sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
1349
      obstack_grow (&name_obstack, buffer, strlen (buffer));
1350
 
1351
      break;
1352
 
1353
    case BIT_FIELD_REF:
1354
    case REALPART_EXPR:
1355
    case IMAGPART_EXPR:
1356
      gcc_unreachable ();       /* we treat these as scalars.  */
1357
      break;
1358
    default:
1359
      break;
1360
    }
1361
}
1362
 
1363
/* Create a human readable name for a replacement variable based on EXPR.  */
1364
 
1365
static char *
1366
make_fancy_name (tree expr)
1367
{
1368
  make_fancy_name_1 (expr);
1369
  obstack_1grow (&name_obstack, '\0');
1370
  return XOBFINISH (&name_obstack, char *);
1371
}
1372
 
1373
/* Helper function for build_ref_for_offset.  */
1374
 
1375
static bool
1376
build_ref_for_offset_1 (tree *res, tree type, HOST_WIDE_INT offset,
1377
                        tree exp_type)
1378
{
1379
  while (1)
1380
    {
1381
      tree fld;
1382
      tree tr_size, index, minidx;
1383
      HOST_WIDE_INT el_size;
1384
 
1385
      if (offset == 0 && exp_type
1386
          && types_compatible_p (exp_type, type))
1387
        return true;
1388
 
1389
      switch (TREE_CODE (type))
1390
        {
1391
        case UNION_TYPE:
1392
        case QUAL_UNION_TYPE:
1393
        case RECORD_TYPE:
1394
          for (fld = TYPE_FIELDS (type); fld; fld = TREE_CHAIN (fld))
1395
            {
1396
              HOST_WIDE_INT pos, size;
1397
              tree expr, *expr_ptr;
1398
 
1399
              if (TREE_CODE (fld) != FIELD_DECL)
1400
                continue;
1401
 
1402
              pos = int_bit_position (fld);
1403
              gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
1404
              tr_size = DECL_SIZE (fld);
1405
              if (!tr_size || !host_integerp (tr_size, 1))
1406
                continue;
1407
              size = tree_low_cst (tr_size, 1);
1408
              if (size == 0)
1409
                {
1410
                  if (pos != offset)
1411
                    continue;
1412
                }
1413
              else if (pos > offset || (pos + size) <= offset)
1414
                continue;
1415
 
1416
              if (res)
1417
                {
1418
                  expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
1419
                                 NULL_TREE);
1420
                  expr_ptr = &expr;
1421
                }
1422
              else
1423
                expr_ptr = NULL;
1424
              if (build_ref_for_offset_1 (expr_ptr, TREE_TYPE (fld),
1425
                                          offset - pos, exp_type))
1426
                {
1427
                  if (res)
1428
                    *res = expr;
1429
                  return true;
1430
                }
1431
            }
1432
          return false;
1433
 
1434
        case ARRAY_TYPE:
1435
          tr_size = TYPE_SIZE (TREE_TYPE (type));
1436
          if (!tr_size || !host_integerp (tr_size, 1))
1437
            return false;
1438
          el_size = tree_low_cst (tr_size, 1);
1439
 
1440
          minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
1441
          if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
1442
            return false;
1443
          if (res)
1444
            {
1445
              index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
1446
              if (!integer_zerop (minidx))
1447
                index = int_const_binop (PLUS_EXPR, index, minidx, 0);
1448
              *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
1449
                             NULL_TREE, NULL_TREE);
1450
            }
1451
          offset = offset % el_size;
1452
          type = TREE_TYPE (type);
1453
          break;
1454
 
1455
        default:
1456
          if (offset != 0)
1457
            return false;
1458
 
1459
          if (exp_type)
1460
            return false;
1461
          else
1462
            return true;
1463
        }
1464
    }
1465
}
1466
 
1467
/* Construct an expression that would reference a part of aggregate *EXPR of
1468
   type TYPE at the given OFFSET of the type EXP_TYPE.  If EXPR is NULL, the
1469
   function only determines whether it can build such a reference without
1470
   actually doing it; otherwise, the tree it points to is unshared first and
1471
   then used as a base for further sub-references.
1472
 
1473
   FIXME: Eventually this should be replaced with
1474
   maybe_fold_offset_to_reference() from tree-ssa-ccp.c but that requires a
1475
   minor rewrite of fold_stmt.
1476
 */
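
/* A hypothetical use (assuming a 32-bit int): with *EXPR being a variable s
   of type struct S { int a; int b; }, calling
   build_ref_for_offset (&expr, TREE_TYPE (s), 32, integer_type_node, false)
   rewrites *EXPR into the equivalent of s.b and returns true, while an
   OFFSET that does not fall on any field of a compatible type makes it
   return false and leave *EXPR alone for the caller to handle.  */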
1477
 
1478
bool
1479
build_ref_for_offset (tree *expr, tree type, HOST_WIDE_INT offset,
1480
                      tree exp_type, bool allow_ptr)
1481
{
1482
  location_t loc = expr ? EXPR_LOCATION (*expr) : UNKNOWN_LOCATION;
1483
 
1484
  if (expr)
1485
    *expr = unshare_expr (*expr);
1486
 
1487
  if (allow_ptr && POINTER_TYPE_P (type))
1488
    {
1489
      type = TREE_TYPE (type);
1490
      if (expr)
1491
        *expr = fold_build1_loc (loc, INDIRECT_REF, type, *expr);
1492
    }
1493
 
1494
  return build_ref_for_offset_1 (expr, type, offset, exp_type);
1495
}
1496
 
1497
/* Return true iff TYPE is stdarg va_list type.  */
1498
 
1499
static inline bool
1500
is_va_list_type (tree type)
1501
{
1502
  return TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (va_list_type_node);
1503
}
1504
 
1505
/* The very first phase of intraprocedural SRA.  It marks in candidate_bitmap
1506
   those variables whose type is suitable for scalarization.  */
1507
 
1508
static bool
1509
find_var_candidates (void)
1510
{
1511
  tree var, type;
1512
  referenced_var_iterator rvi;
1513
  bool ret = false;
1514
 
1515
  FOR_EACH_REFERENCED_VAR (var, rvi)
1516
    {
1517
      if (TREE_CODE (var) != VAR_DECL && TREE_CODE (var) != PARM_DECL)
1518
        continue;
1519
      type = TREE_TYPE (var);
1520
 
1521
      if (!AGGREGATE_TYPE_P (type)
1522
          || needs_to_live_in_memory (var)
1523
          || TREE_THIS_VOLATILE (var)
1524
          || !COMPLETE_TYPE_P (type)
1525
          || !host_integerp (TYPE_SIZE (type), 1)
1526
          || tree_low_cst (TYPE_SIZE (type), 1) == 0
1527
          || type_internals_preclude_sra_p (type)
1528
          /* Fix for PR 41089.  tree-stdarg.c needs to have va_lists intact but
1529
              we also want to schedule it rather late.  Thus we ignore it in
1530
              the early pass. */
1531
          || (sra_mode == SRA_MODE_EARLY_INTRA
1532
              && is_va_list_type (type)))
1533
        continue;
1534
 
1535
      bitmap_set_bit (candidate_bitmap, DECL_UID (var));
1536
 
1537
      if (dump_file && (dump_flags & TDF_DETAILS))
1538
        {
1539
          fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
1540
          print_generic_expr (dump_file, var, 0);
1541
          fprintf (dump_file, "\n");
1542
        }
1543
      ret = true;
1544
    }
1545
 
1546
  return ret;
1547
}
1548
 
1549
/* Sort all accesses for the given variable, check for partial overlaps and
1550
   return NULL if there are any.  If there are none, pick a representative for
1551
   each combination of offset and size and create a linked list out of them.
1552
   Return the pointer to the first representative and make sure it is the first
1553
   one in the vector of accesses.  */
1554
 
1555
static struct access *
1556
sort_and_splice_var_accesses (tree var)
1557
{
1558
  int i, j, access_count;
1559
  struct access *res, **prev_acc_ptr = &res;
1560
  VEC (access_p, heap) *access_vec;
1561
  bool first = true;
1562
  HOST_WIDE_INT low = -1, high = 0;
1563
 
1564
  access_vec = get_base_access_vector (var);
1565
  if (!access_vec)
1566
    return NULL;
1567
  access_count = VEC_length (access_p, access_vec);
1568
 
1569
  /* Sort by <OFFSET, SIZE>.  */
1570
  qsort (VEC_address (access_p, access_vec), access_count, sizeof (access_p),
1571
         compare_access_positions);
1572
 
1573
  i = 0;
1574
  while (i < access_count)
1575
    {
1576
      struct access *access = VEC_index (access_p, access_vec, i);
1577
      bool grp_write = access->write;
1578
      bool grp_read = !access->write;
1579
      bool grp_assignment_read = access->grp_assignment_read;
1580
      bool multiple_reads = false;
1581
      bool total_scalarization = access->total_scalarization;
1582
      bool grp_partial_lhs = access->grp_partial_lhs;
1583
      bool first_scalar = is_gimple_reg_type (access->type);
1584
      bool unscalarizable_region = access->grp_unscalarizable_region;
1585
 
1586
      if (first || access->offset >= high)
1587
        {
1588
          first = false;
1589
          low = access->offset;
1590
          high = access->offset + access->size;
1591
        }
1592
      else if (access->offset > low && access->offset + access->size > high)
1593
        return NULL;
1594
      else
1595
        gcc_assert (access->offset >= low
1596
                    && access->offset + access->size <= high);
1597
 
1598
      j = i + 1;
1599
      while (j < access_count)
1600
        {
1601
          struct access *ac2 = VEC_index (access_p, access_vec, j);
1602
          if (ac2->offset != access->offset || ac2->size != access->size)
1603
            break;
1604
          if (ac2->write)
1605
            grp_write = true;
1606
          else
1607
            {
1608
              if (grp_read)
1609
                multiple_reads = true;
1610
              else
1611
                grp_read = true;
1612
            }
1613
          grp_assignment_read |= ac2->grp_assignment_read;
1614
          grp_partial_lhs |= ac2->grp_partial_lhs;
1615
          unscalarizable_region |= ac2->grp_unscalarizable_region;
1616
          total_scalarization |= ac2->total_scalarization;
1617
          relink_to_new_repr (access, ac2);
1618
 
1619
          /* If there are both aggregate-type and scalar-type accesses with
1620
             this combination of size and offset, the comparison function
1621
             should have put the scalars first.  */
1622
          gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
1623
          ac2->group_representative = access;
1624
          j++;
1625
        }
1626
 
1627
      i = j;
1628
 
1629
      access->group_representative = access;
1630
      access->grp_write = grp_write;
1631
      access->grp_read = grp_read;
1632
      access->grp_assignment_read = grp_assignment_read;
1633
      access->grp_hint = multiple_reads || total_scalarization;
1634
      access->grp_partial_lhs = grp_partial_lhs;
1635
      access->grp_unscalarizable_region = unscalarizable_region;
1636
      if (access->first_link)
1637
        add_access_to_work_queue (access);
1638
 
1639
      *prev_acc_ptr = access;
1640
      prev_acc_ptr = &access->next_grp;
1641
    }
1642
 
1643
  gcc_assert (res == VEC_index (access_p, access_vec, 0));
1644
  return res;
1645
}
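/* For example (with the hypothetical candidate P from the sketch above): two
   reads and one write of P.A all share the same <offset, size> pair after
   sorting, so a single representative is kept with grp_read, grp_write and,
   because of the repeated reads, grp_hint set; the other two accesses merely
   point to it through their group_representative field.  */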
1646
 
1647
/* Create a variable for the given ACCESS which determines the type, name and a
1648
   few other properties.  Return the variable declaration and store it also to
1649
   ACCESS->replacement.  */
1650
 
1651
static tree
1652
create_access_replacement (struct access *access, bool rename)
1653
{
1654
  tree repl;
1655
 
1656
  repl = create_tmp_var (access->type, "SR");
1657
  get_var_ann (repl);
1658
  add_referenced_var (repl);
1659
  if (rename)
1660
    mark_sym_for_renaming (repl);
1661
 
1662
  if (!access->grp_partial_lhs
1663
      && (TREE_CODE (access->type) == COMPLEX_TYPE
1664
          || TREE_CODE (access->type) == VECTOR_TYPE))
1665
    DECL_GIMPLE_REG_P (repl) = 1;
1666
 
1667
  DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
1668
  DECL_ARTIFICIAL (repl) = 1;
1669
  DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);
1670
 
1671
  if (DECL_NAME (access->base)
1672
      && !DECL_IGNORED_P (access->base)
1673
      && !DECL_ARTIFICIAL (access->base))
1674
    {
1675
      char *pretty_name = make_fancy_name (access->expr);
1676
 
1677
      DECL_NAME (repl) = get_identifier (pretty_name);
1678
      obstack_free (&name_obstack, pretty_name);
1679
 
1680
      SET_DECL_DEBUG_EXPR (repl, access->expr);
1681
      DECL_DEBUG_EXPR_IS_FROM (repl) = 1;
1682
      TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
1683
    }
1684
  else
1685
    TREE_NO_WARNING (repl) = 1;
1686
 
1687
  if (dump_file)
1688
    {
1689
      fprintf (dump_file, "Created a replacement for ");
1690
      print_generic_expr (dump_file, access->base, 0);
1691
      fprintf (dump_file, " offset: %u, size: %u: ",
1692
               (unsigned) access->offset, (unsigned) access->size);
1693
      print_generic_expr (dump_file, repl, 0);
1694
      fprintf (dump_file, "\n");
1695
    }
1696
  sra_stats.replacements++;
1697
 
1698
  return repl;
1699
}
1700
 
1701
/* Return ACCESS's scalar replacement, create it if it does not exist yet.  */
1702
 
1703
static inline tree
1704
get_access_replacement (struct access *access)
1705
{
1706
  gcc_assert (access->grp_to_be_replaced);
1707
 
1708
  if (!access->replacement_decl)
1709
    access->replacement_decl = create_access_replacement (access, true);
1710
  return access->replacement_decl;
1711
}
1712
 
1713
/* Return ACCESS's scalar replacement, create it if it does not exist yet but do
1714
   not mark it for renaming.  */
1715
 
1716
static inline tree
1717
get_unrenamed_access_replacement (struct access *access)
1718
{
1719
  gcc_assert (!access->grp_to_be_replaced);
1720
 
1721
  if (!access->replacement_decl)
1722
    access->replacement_decl = create_access_replacement (access, false);
1723
  return access->replacement_decl;
1724
}
1725
 
1726
/* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
1727
   linked list along the way.  Stop when *ACCESS is NULL or the access pointed
1728
   to it is not "within" the root.  Return false iff some accesses partially
1729
   overlap.  */
1730
 
1731
static bool
1732
build_access_subtree (struct access **access)
1733
{
1734
  struct access *root = *access, *last_child = NULL;
1735
  HOST_WIDE_INT limit = root->offset + root->size;
1736
 
1737
  *access = (*access)->next_grp;
1738
  while  (*access && (*access)->offset + (*access)->size <= limit)
1739
    {
1740
      if (!last_child)
1741
        root->first_child = *access;
1742
      else
1743
        last_child->next_sibling = *access;
1744
      last_child = *access;
1745
 
1746
      if (!build_access_subtree (access))
1747
        return false;
1748
    }
1749
 
1750
  if (*access && (*access)->offset < limit)
1751
    return false;
1752
 
1753
  return true;
1754
}
1755
 
1756
/* Build a tree of access representatives, ACCESS is the pointer to the first
1757
   one, others are linked in a list by the next_grp field.  Return false iff
1758
   some accesses partially overlap.  */
1759
 
1760
static bool
1761
build_access_trees (struct access *access)
1762
{
1763
  while (access)
1764
    {
1765
      struct access *root = access;
1766
 
1767
      if (!build_access_subtree (&access))
1768
        return false;
1769
      root->next_grp = access;
1770
    }
1771
  return true;
1772
}
1773
 
1774
/* Return true if EXPR contains some ARRAY_REFs into a variable-bounded
1775
   array.  */
1776
 
1777
static bool
1778
expr_with_var_bounded_array_refs_p (tree expr)
1779
{
1780
  while (handled_component_p (expr))
1781
    {
1782
      if (TREE_CODE (expr) == ARRAY_REF
1783
          && !host_integerp (array_ref_low_bound (expr), 0))
1784
        return true;
1785
      expr = TREE_OPERAND (expr, 0);
1786
    }
1787
  return false;
1788
}
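/* For instance, a reference such as A[I].F where the ARRAY_REF's low bound is
   not a compile-time constant (e.g. a Fortran array or a VLA-typed field)
   makes the whole expression fail the check above and thus blocks a scalar
   replacement for it in analyze_access_subtree.  A, I and F are illustrative
   names only.  */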
1789
 
1790
enum mark_read_status { SRA_MR_NOT_READ, SRA_MR_READ, SRA_MR_ASSIGN_READ};
1791
 
1792
/* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
1793
   it seems beneficial and ALLOW_REPLACEMENTS allows it.  Also set all
1794
   sorts of access flags appropriately along the way, notably always set
1795
   grp_read and grp_assignment_read according to MARK_READ and grp_write when
1796
   MARK_WRITE is true.  */
1797
 
1798
static bool
1799
analyze_access_subtree (struct access *root, bool allow_replacements,
1800
                        enum mark_read_status mark_read, bool mark_write)
1801
{
1802
  struct access *child;
1803
  HOST_WIDE_INT limit = root->offset + root->size;
1804
  HOST_WIDE_INT covered_to = root->offset;
1805
  bool scalar = is_gimple_reg_type (root->type);
1806
  bool hole = false, sth_created = false;
1807
  bool direct_read = root->grp_read;
1808
 
1809
  if (mark_read == SRA_MR_ASSIGN_READ)
1810
    {
1811
      root->grp_read = 1;
1812
      root->grp_assignment_read = 1;
1813
    }
1814
  if (mark_read == SRA_MR_READ)
1815
    root->grp_read = 1;
1816
  else if (root->grp_assignment_read)
1817
    mark_read = SRA_MR_ASSIGN_READ;
1818
  else if (root->grp_read)
1819
    mark_read = SRA_MR_READ;
1820
 
1821
  if (mark_write)
1822
    root->grp_write = true;
1823
  else if (root->grp_write)
1824
    mark_write = true;
1825
 
1826
  if (root->grp_unscalarizable_region)
1827
    allow_replacements = false;
1828
 
1829
  if (allow_replacements && expr_with_var_bounded_array_refs_p (root->expr))
1830
    allow_replacements = false;
1831
 
1832
  for (child = root->first_child; child; child = child->next_sibling)
1833
    {
1834
      if (!hole && child->offset < covered_to)
1835
        hole = true;
1836
      else
1837
        covered_to += child->size;
1838
 
1839
      sth_created |= analyze_access_subtree (child,
1840
                                             allow_replacements && !scalar,
1841
                                             mark_read, mark_write);
1842
 
1843
      root->grp_unscalarized_data |= child->grp_unscalarized_data;
1844
      hole |= !child->grp_covered;
1845
    }
1846
 
1847
  if (allow_replacements && scalar && !root->first_child
1848
      && (root->grp_hint
1849
          || (root->grp_write && (direct_read || root->grp_assignment_read)))
1850
      /* We must not ICE later on when trying to build an access to the
1851
         original data within the aggregate even when it is impossible to do in
1852
         a defined way like in the PR 42703 testcase.  Therefore we check
1853
         pre-emptively here that we will be able to do that.  */
1854
      && build_ref_for_offset (NULL, TREE_TYPE (root->base), root->offset,
1855
                               root->type, false))
1856
    {
1857
      if (dump_file && (dump_flags & TDF_DETAILS))
1858
        {
1859
          fprintf (dump_file, "Marking ");
1860
          print_generic_expr (dump_file, root->base, 0);
1861
          fprintf (dump_file, " offset: %u, size: %u: ",
1862
                   (unsigned) root->offset, (unsigned) root->size);
1863
          fprintf (dump_file, " to be replaced.\n");
1864
        }
1865
 
1866
      root->grp_to_be_replaced = 1;
1867
      sth_created = true;
1868
      hole = false;
1869
    }
1870
  else if (covered_to < limit)
1871
    hole = true;
1872
 
1873
  if (sth_created && !hole)
1874
    {
1875
      root->grp_covered = 1;
1876
      return true;
1877
    }
1878
  if (root->grp_write || TREE_CODE (root->base) == PARM_DECL)
1879
    root->grp_unscalarized_data = 1; /* not covered and written to */
1880
  if (sth_created)
1881
    return true;
1882
  return false;
1883
}
1884
 
1885
/* Analyze all access trees linked by next_grp by the means of
1886
   analyze_access_subtree.  */
1887
static bool
1888
analyze_access_trees (struct access *access)
1889
{
1890
  bool ret = false;
1891
 
1892
  while (access)
1893
    {
1894
      if (analyze_access_subtree (access, true, SRA_MR_NOT_READ, false))
1895
        ret = true;
1896
      access = access->next_grp;
1897
    }
1898
 
1899
  return ret;
1900
}
1901
 
1902
/* Return true iff a potential new child of LACC at offset OFFSET and with size
1903
   SIZE would conflict with an already existing one.  If exactly such a child
1904
   already exists in LACC, store a pointer to it in EXACT_MATCH.  */
1905
 
1906
static bool
1907
child_would_conflict_in_lacc (struct access *lacc, HOST_WIDE_INT norm_offset,
1908
                              HOST_WIDE_INT size, struct access **exact_match)
1909
{
1910
  struct access *child;
1911
 
1912
  for (child = lacc->first_child; child; child = child->next_sibling)
1913
    {
1914
      if (child->offset == norm_offset && child->size == size)
1915
        {
1916
          *exact_match = child;
1917
          return true;
1918
        }
1919
 
1920
      if (child->offset < norm_offset + size
1921
          && child->offset + child->size > norm_offset)
1922
        return true;
1923
    }
1924
 
1925
  return false;
1926
}
1927
 
1928
/* Create a new child access of PARENT, with all properties just like MODEL
1929
   except for its offset and with its grp_write true and grp_read false.
1930
   Return the new access or NULL if it cannot be created.  Note that this access
1931
   is created long after all splicing and sorting, it's not located in any
1932
   access vector and is automatically a representative of its group.  */
1933
 
1934
static struct access *
1935
create_artificial_child_access (struct access *parent, struct access *model,
1936
                                HOST_WIDE_INT new_offset)
1937
{
1938
  struct access *access;
1939
  struct access **child;
1940
  tree expr = parent->base;
1941
 
1942
  gcc_assert (!model->grp_unscalarizable_region);
1943
 
1944
  if (!build_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
1945
                             model->type, false))
1946
    return NULL;
1947
 
1948
  access = (struct access *) pool_alloc (access_pool);
1949
  memset (access, 0, sizeof (struct access));
1950
  access->base = parent->base;
1951
  access->expr = expr;
1952
  access->offset = new_offset;
1953
  access->size = model->size;
1954
  access->type = model->type;
1955
  access->grp_write = true;
1956
  access->grp_read = false;
1957
 
1958
  child = &parent->first_child;
1959
  while (*child && (*child)->offset < new_offset)
1960
    child = &(*child)->next_sibling;
1961
 
1962
  access->next_sibling = *child;
1963
  *child = access;
1964
 
1965
  return access;
1966
}
1967
 
1968
 
1969
/* Propagate all subaccesses of RACC across an assignment link to LACC. Return
1970
   true if any new subaccess was created.  Additionally, if RACC is a scalar
1971
   access but LACC is not, change the type of the latter, if possible.  */
1972
 
1973
static bool
1974
propagate_subaccesses_across_link (struct access *lacc, struct access *racc)
1975
{
1976
  struct access *rchild;
1977
  HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
1978
  bool ret = false;
1979
 
1980
  if (is_gimple_reg_type (lacc->type)
1981
      || lacc->grp_unscalarizable_region
1982
      || racc->grp_unscalarizable_region)
1983
    return false;
1984
 
1985
  if (!lacc->first_child && !racc->first_child
1986
      && is_gimple_reg_type (racc->type))
1987
    {
1988
      tree t = lacc->base;
1989
 
1990
      if (build_ref_for_offset (&t, TREE_TYPE (t), lacc->offset, racc->type,
1991
                                false))
1992
        {
1993
          lacc->expr = t;
1994
          lacc->type = racc->type;
1995
        }
1996
      return false;
1997
    }
1998
 
1999
  for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
2000
    {
2001
      struct access *new_acc = NULL;
2002
      HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;
2003
 
2004
      if (rchild->grp_unscalarizable_region)
2005
        continue;
2006
 
2007
      if (child_would_conflict_in_lacc (lacc, norm_offset, rchild->size,
2008
                                        &new_acc))
2009
        {
2010
          if (new_acc)
2011
            {
2012
              rchild->grp_hint = 1;
2013
              new_acc->grp_hint |= new_acc->grp_read;
2014
              if (rchild->first_child)
2015
                ret |= propagate_subaccesses_across_link (new_acc, rchild);
2016
            }
2017
          continue;
2018
        }
2019
 
2020
      /* If (a part of) a union field is on the RHS of an assignment, it can
2021
         have sub-accesses which do not make sense on the LHS (PR 40351).
2022
         Check that this is not the case.  */
2023
      if (!build_ref_for_offset (NULL, TREE_TYPE (lacc->base), norm_offset,
2024
                                 rchild->type, false))
2025
        continue;
2026
 
2027
      rchild->grp_hint = 1;
2028
      new_acc = create_artificial_child_access (lacc, rchild, norm_offset);
2029
      if (new_acc)
2030
        {
2031
          ret = true;
2032
          if (racc->first_child)
2033
            propagate_subaccesses_across_link (new_acc, rchild);
2034
        }
2035
    }
2036
 
2037
  return ret;
2038
}
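/* A sketch of the effect (D, S and X are invented names): for an assignment
   D = S between two candidate aggregates where S.X already has an access but
   D has none at the corresponding offset, an artificial child access for the
   same part of D is created here, so that the copy can later be performed
   between scalar replacements instead of through memory.  */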
2039
 
2040
/* Propagate all subaccesses across assignment links.  */
2041
 
2042
static void
2043
propagate_all_subaccesses (void)
2044
{
2045
  while (work_queue_head)
2046
    {
2047
      struct access *racc = pop_access_from_work_queue ();
2048
      struct assign_link *link;
2049
 
2050
      gcc_assert (racc->first_link);
2051
 
2052
      for (link = racc->first_link; link; link = link->next)
2053
        {
2054
          struct access *lacc = link->lacc;
2055
 
2056
          if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
2057
            continue;
2058
          lacc = lacc->group_representative;
2059
          if (propagate_subaccesses_across_link (lacc, racc)
2060
              && lacc->first_link)
2061
            add_access_to_work_queue (lacc);
2062
        }
2063
    }
2064
}
2065
 
2066
/* Go through all accesses collected throughout the (intraprocedural) analysis
2067
   stage, exclude overlapping ones, identify representatives and build trees
2068
   out of them, making decisions about scalarization on the way.  Return true
2069
   iff there are any to-be-scalarized variables after this stage. */
2070
 
2071
static bool
2072
analyze_all_variable_accesses (void)
2073
{
2074
  int res = 0;
2075
  bitmap tmp = BITMAP_ALLOC (NULL);
2076
  bitmap_iterator bi;
2077
  unsigned i, max_total_scalarization_size;
2078
 
2079
  max_total_scalarization_size = UNITS_PER_WORD * BITS_PER_UNIT
2080
    * MOVE_RATIO (optimize_function_for_speed_p (cfun));
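  /* Purely as an illustration of the threshold just computed (MOVE_RATIO is
     target-dependent): with UNITS_PER_WORD == 4, BITS_PER_UNIT == 8 and a
     MOVE_RATIO of 4, the limit is 4 * 8 * 4 == 128 bits, i.e. only aggregates
     of up to 16 bytes are considered for total scalarization below.  */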
2081
 
2082
  EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
2083
    if (bitmap_bit_p (should_scalarize_away_bitmap, i)
2084
        && !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
2085
      {
2086
        tree var = referenced_var (i);
2087
 
2088
        if (TREE_CODE (var) == VAR_DECL
2089
            && ((unsigned) tree_low_cst (TYPE_SIZE (TREE_TYPE (var)), 1)
2090
                <= max_total_scalarization_size)
2091
            && type_consists_of_records_p (TREE_TYPE (var)))
2092
          {
2093
            completely_scalarize_record (var, var, 0);
2094
            if (dump_file && (dump_flags & TDF_DETAILS))
2095
              {
2096
                fprintf (dump_file, "Will attempt to totally scalarize ");
2097
                print_generic_expr (dump_file, var, 0);
2098
                fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2099
              }
2100
          }
2101
      }
2102
 
2103
  bitmap_copy (tmp, candidate_bitmap);
2104
  EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2105
    {
2106
      tree var = referenced_var (i);
2107
      struct access *access;
2108
 
2109
      access = sort_and_splice_var_accesses (var);
2110
      if (!access || !build_access_trees (access))
2111
        disqualify_candidate (var,
2112
                              "No or inhibitingly overlapping accesses.");
2113
    }
2114
 
2115
  propagate_all_subaccesses ();
2116
 
2117
  bitmap_copy (tmp, candidate_bitmap);
2118
  EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2119
    {
2120
      tree var = referenced_var (i);
2121
      struct access *access = get_first_repr_for_decl (var);
2122
 
2123
      if (analyze_access_trees (access))
2124
        {
2125
          res++;
2126
          if (dump_file && (dump_flags & TDF_DETAILS))
2127
            {
2128
              fprintf (dump_file, "\nAccess trees for ");
2129
              print_generic_expr (dump_file, var, 0);
2130
              fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2131
              dump_access_tree (dump_file, access);
2132
              fprintf (dump_file, "\n");
2133
            }
2134
        }
2135
      else
2136
        disqualify_candidate (var, "No scalar replacements to be created.");
2137
    }
2138
 
2139
  BITMAP_FREE (tmp);
2140
 
2141
  if (res)
2142
    {
2143
      statistics_counter_event (cfun, "Scalarized aggregates", res);
2144
      return true;
2145
    }
2146
  else
2147
    return false;
2148
}
2149
 
2150
/* Return true iff a reference statement into aggregate AGG can be built for
2151
   every single to-be-replaced access that is a child of ACCESS, its sibling
2152
   or a child of its sibling. TOP_OFFSET is the offset from the processed
2153
   access subtree that has to be subtracted from offset of each access.  */
2154
 
2155
static bool
2156
ref_expr_for_all_replacements_p (struct access *access, tree agg,
2157
                                 HOST_WIDE_INT top_offset)
2158
{
2159
  do
2160
    {
2161
      if (access->grp_to_be_replaced
2162
          && !build_ref_for_offset (NULL, TREE_TYPE (agg),
2163
                                    access->offset - top_offset,
2164
                                    access->type, false))
2165
        return false;
2166
 
2167
      if (access->first_child
2168
          && !ref_expr_for_all_replacements_p (access->first_child, agg,
2169
                                               top_offset))
2170
        return false;
2171
 
2172
      access = access->next_sibling;
2173
    }
2174
  while (access);
2175
 
2176
  return true;
2177
}
2178
 
2179
/* Generate statements copying scalar replacements of accesses within a subtree
2180
   into or out of AGG.  ACCESS is the first child of the root of the subtree to
2181
   be processed.  AGG is an aggregate type expression (can be a declaration but
2182
   does not have to be, it can for example also be an indirect_ref).
2183
   TOP_OFFSET is the offset of the processed subtree which has to be subtracted
2184
   from offsets of individual accesses to get corresponding offsets for AGG.
2185
   If CHUNK_SIZE is non-null, copy only replacements in the interval
2186
   <start_offset, start_offset + chunk_size>, otherwise copy all.  GSI is a
2187
   statement iterator used to place the new statements.  WRITE should be true
2188
   when the statements should write from AGG to the replacement and false if
2189
   vice versa.  If INSERT_AFTER is true, new statements will be added after the
2190
   current statement in GSI; otherwise they will be added before the
2191
   statement.  */
2192
 
2193
static void
2194
generate_subtree_copies (struct access *access, tree agg,
2195
                         HOST_WIDE_INT top_offset,
2196
                         HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
2197
                         gimple_stmt_iterator *gsi, bool write,
2198
                         bool insert_after)
2199
{
2200
  do
2201
    {
2202
      tree expr = agg;
2203
 
2204
      if (chunk_size && access->offset >= start_offset + chunk_size)
2205
        return;
2206
 
2207
      if (access->grp_to_be_replaced
2208
          && (chunk_size == 0
2209
              || access->offset + access->size > start_offset))
2210
        {
2211
          tree repl = get_access_replacement (access);
2212
          bool ref_found;
2213
          gimple stmt;
2214
 
2215
          ref_found = build_ref_for_offset (&expr, TREE_TYPE (agg),
2216
                                             access->offset - top_offset,
2217
                                             access->type, false);
2218
          gcc_assert (ref_found);
2219
 
2220
          if (write)
2221
            {
2222
              if (access->grp_partial_lhs)
2223
                expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
2224
                                                 !insert_after,
2225
                                                 insert_after ? GSI_NEW_STMT
2226
                                                 : GSI_SAME_STMT);
2227
              stmt = gimple_build_assign (repl, expr);
2228
            }
2229
          else
2230
            {
2231
              TREE_NO_WARNING (repl) = 1;
2232
              if (access->grp_partial_lhs)
2233
                repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2234
                                                 !insert_after,
2235
                                                 insert_after ? GSI_NEW_STMT
2236
                                                 : GSI_SAME_STMT);
2237
              stmt = gimple_build_assign (expr, repl);
2238
            }
2239
 
2240
          if (insert_after)
2241
            gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2242
          else
2243
            gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2244
          update_stmt (stmt);
2245
          sra_stats.subtree_copies++;
2246
        }
2247
 
2248
      if (access->first_child)
2249
        generate_subtree_copies (access->first_child, agg, top_offset,
2250
                                 start_offset, chunk_size, gsi,
2251
                                 write, insert_after);
2252
 
2253
      access = access->next_sibling;
2254
    }
2255
  while (access);
2256
}
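/* To illustrate the generated code (identifiers are invented): for an
   aggregate S whose fields A and B have replacements SR_a and SR_b, a call
   with WRITE set emits

     SR_a = S.A;
     SR_b = S.B;

   whereas with WRITE clear the assignments go the other way, storing the
   replacements back into the aggregate.  */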
2257
 
2258
/* Assign zero to all scalar replacements in an access subtree.  ACCESS is the
2259
   root of the subtree to be processed.  GSI is the statement iterator used
2260
   for inserting statements which are added after the current statement if
2261
   INSERT_AFTER is true or before it otherwise.  */
2262
 
2263
static void
2264
init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
2265
                        bool insert_after)
2266
 
2267
{
2268
  struct access *child;
2269
 
2270
  if (access->grp_to_be_replaced)
2271
    {
2272
      gimple stmt;
2273
 
2274
      stmt = gimple_build_assign (get_access_replacement (access),
2275
                                  fold_convert (access->type,
2276
                                                integer_zero_node));
2277
      if (insert_after)
2278
        gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2279
      else
2280
        gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2281
      update_stmt (stmt);
2282
    }
2283
 
2284
  for (child = access->first_child; child; child = child->next_sibling)
2285
    init_subtree_with_zero (child, gsi, insert_after);
2286
}
2287
 
2288
/* Search for an access representative for the given expression EXPR and
2289
   return it or NULL if it cannot be found.  */
2290
 
2291
static struct access *
2292
get_access_for_expr (tree expr)
2293
{
2294
  HOST_WIDE_INT offset, size, max_size;
2295
  tree base;
2296
 
2297
  /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
2298
     a different size than the size of its argument and we need the latter
2299
     one.  */
2300
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
2301
    expr = TREE_OPERAND (expr, 0);
2302
 
2303
  base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
2304
  if (max_size == -1 || !DECL_P (base))
2305
    return NULL;
2306
 
2307
  if (!bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
2308
    return NULL;
2309
 
2310
  return get_var_base_offset_size_access (base, offset, max_size);
2311
}
2312
 
2313
/* Callback for scan_function.  Replace the expression EXPR with a scalar
2314
   replacement if there is one and generate other statements to do type
2315
   conversion or subtree copying if necessary.  GSI is used to place newly
2316
   created statements, WRITE is true if the expression is being written to (it
2317
   is on a LHS of a statement or output in an assembly statement).  */
2318
 
2319
static bool
2320
sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write,
2321
                 void *data ATTRIBUTE_UNUSED)
2322
{
2323
  struct access *access;
2324
  tree type, bfr;
2325
 
2326
  if (TREE_CODE (*expr) == BIT_FIELD_REF)
2327
    {
2328
      bfr = *expr;
2329
      expr = &TREE_OPERAND (*expr, 0);
2330
    }
2331
  else
2332
    bfr = NULL_TREE;
2333
 
2334
  if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
2335
    expr = &TREE_OPERAND (*expr, 0);
2336
  access = get_access_for_expr (*expr);
2337
  if (!access)
2338
    return false;
2339
  type = TREE_TYPE (*expr);
2340
 
2341
  if (access->grp_to_be_replaced)
2342
    {
2343
      tree repl = get_access_replacement (access);
2344
      /* If we replace a non-register typed access simply use the original
2345
         access expression to extract the scalar component afterwards.
2346
         This happens if scalarizing a function return value or parameter
2347
         like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
2348
         gcc.c-torture/compile/20011217-1.c.
2349
 
2350
         We also want to use this when accessing a complex or vector which can
2351
         be accessed as a different type too, potentially creating a need for
2352
         type conversion (see PR42196) and when scalarized unions are involved
2353
         in assembler statements (see PR42398).  */
2354
      if (!useless_type_conversion_p (type, access->type))
2355
        {
2356
          tree ref = access->base;
2357
          bool ok;
2358
 
2359
          ok = build_ref_for_offset (&ref, TREE_TYPE (ref),
2360
                                     access->offset, access->type, false);
2361
          gcc_assert (ok);
2362
 
2363
          if (write)
2364
            {
2365
              gimple stmt;
2366
 
2367
              if (access->grp_partial_lhs)
2368
                ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
2369
                                                 false, GSI_NEW_STMT);
2370
              stmt = gimple_build_assign (repl, ref);
2371
              gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2372
            }
2373
          else
2374
            {
2375
              gimple stmt;
2376
 
2377
              if (access->grp_partial_lhs)
2378
                repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2379
                                                 true, GSI_SAME_STMT);
2380
              stmt = gimple_build_assign (ref, repl);
2381
              gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2382
            }
2383
        }
2384
      else
2385
        *expr = repl;
2386
      sra_stats.exprs++;
2387
    }
2388
 
2389
  if (access->first_child)
2390
    {
2391
      HOST_WIDE_INT start_offset, chunk_size;
2392
      if (bfr
2393
          && host_integerp (TREE_OPERAND (bfr, 1), 1)
2394
          && host_integerp (TREE_OPERAND (bfr, 2), 1))
2395
        {
2396
          chunk_size = tree_low_cst (TREE_OPERAND (bfr, 1), 1);
2397
          start_offset = access->offset
2398
            + tree_low_cst (TREE_OPERAND (bfr, 2), 1);
2399
        }
2400
      else
2401
        start_offset = chunk_size = 0;
2402
 
2403
      generate_subtree_copies (access->first_child, access->base, 0,
2404
                               start_offset, chunk_size, gsi, write, write);
2405
    }
2406
  return true;
2407
}
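/* A small before/after sketch (names are invented): a use of S.A whose access
   is marked grp_to_be_replaced becomes a plain use of its replacement SR_a;
   if the expression's type does not match the replacement's type, the
   expression is left referencing the aggregate and the replacement is synced
   with it by an extra assignment inserted next to the statement.  */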
2408
 
2409
/* Where scalar replacements of the RHS have been written to when a replacement
2410
   of the LHS of an assignment cannot be directly loaded from a replacement of
2411
   the RHS. */
2412
enum unscalarized_data_handling { SRA_UDH_NONE,  /* Nothing done so far. */
2413
                                  SRA_UDH_RIGHT, /* Data flushed to the RHS. */
2414
                                  SRA_UDH_LEFT }; /* Data flushed to the LHS. */
2415
 
2416
/* Store all replacements in the access tree rooted in TOP_RACC either to their
2417
   base aggregate if there are unscalarized data or directly to LHS
2418
   otherwise.  */
2419
 
2420
static enum unscalarized_data_handling
2421
handle_unscalarized_data_in_subtree (struct access *top_racc, tree lhs,
2422
                                     gimple_stmt_iterator *gsi)
2423
{
2424
  if (top_racc->grp_unscalarized_data)
2425
    {
2426
      generate_subtree_copies (top_racc->first_child, top_racc->base, 0, 0, 0,
2427
                               gsi, false, false);
2428
      return SRA_UDH_RIGHT;
2429
    }
2430
  else
2431
    {
2432
      generate_subtree_copies (top_racc->first_child, lhs, top_racc->offset,
2433
                               0, 0, gsi, false, false);
2434
      return SRA_UDH_LEFT;
2435
    }
2436
}
2437
 
2438
 
2439
/* Try to generate statements to load all sub-replacements in an access
2440
   (sub)tree (LACC is the first child) from scalar replacements in the TOP_RACC
2441
   (sub)tree.  If that is not possible, refresh the TOP_RACC base aggregate and
2442
   load the accesses from it.  LEFT_OFFSET is the offset of the left whole
2443
   subtree being copied, RIGHT_OFFSET is the same thing for the right subtree.
2444
   NEW_GSI is a stmt iterator used for statement insertions after the original
2445
   assignment, OLD_GSI is used to insert statements before the assignment.
2446
   *REFRESHED keeps the information whether we have needed to refresh
2447
   replacements of the LHS and from which side of the assignment this takes
2448
   place.  */
2449
 
2450
static void
2451
load_assign_lhs_subreplacements (struct access *lacc, struct access *top_racc,
2452
                                 HOST_WIDE_INT left_offset,
2453
                                 HOST_WIDE_INT right_offset,
2454
                                 gimple_stmt_iterator *old_gsi,
2455
                                 gimple_stmt_iterator *new_gsi,
2456
                                 enum unscalarized_data_handling *refreshed,
2457
                                 tree lhs)
2458
{
2459
  location_t loc = EXPR_LOCATION (lacc->expr);
2460
  do
2461
    {
2462
      if (lacc->grp_to_be_replaced)
2463
        {
2464
          struct access *racc;
2465
          HOST_WIDE_INT offset = lacc->offset - left_offset + right_offset;
2466
          gimple stmt;
2467
          tree rhs;
2468
 
2469
          racc = find_access_in_subtree (top_racc, offset, lacc->size);
2470
          if (racc && racc->grp_to_be_replaced)
2471
            {
2472
              rhs = get_access_replacement (racc);
2473
              if (!useless_type_conversion_p (lacc->type, racc->type))
2474
                rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, lacc->type, rhs);
2475
            }
2476
          else
2477
            {
2478
              /* No suitable access on the right hand side, need to load from
2479
                 the aggregate.  See if we have to update it first... */
2480
              if (*refreshed == SRA_UDH_NONE)
2481
                *refreshed = handle_unscalarized_data_in_subtree (top_racc,
2482
                                                                  lhs, old_gsi);
2483
 
2484
              if (*refreshed == SRA_UDH_LEFT)
2485
                {
2486
                  bool repl_found;
2487
 
2488
                  rhs = lacc->base;
2489
                  repl_found = build_ref_for_offset (&rhs, TREE_TYPE (rhs),
2490
                                                     lacc->offset, lacc->type,
2491
                                                     false);
2492
                  gcc_assert (repl_found);
2493
                }
2494
              else
2495
                {
2496
                  bool repl_found;
2497
 
2498
                  rhs = top_racc->base;
2499
                  repl_found = build_ref_for_offset (&rhs,
2500
                                                     TREE_TYPE (top_racc->base),
2501
                                                     offset, lacc->type, false);
2502
                  gcc_assert (repl_found);
2503
                }
2504
            }
2505
 
2506
          stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
2507
          gsi_insert_after (new_gsi, stmt, GSI_NEW_STMT);
2508
          update_stmt (stmt);
2509
          sra_stats.subreplacements++;
2510
        }
2511
      else if (*refreshed == SRA_UDH_NONE
2512
               && lacc->grp_read && !lacc->grp_covered)
2513
        *refreshed = handle_unscalarized_data_in_subtree (top_racc, lhs,
2514
                                                          old_gsi);
2515
 
2516
      if (lacc->first_child)
2517
        load_assign_lhs_subreplacements (lacc->first_child, top_racc,
2518
                                         left_offset, right_offset,
2519
                                         old_gsi, new_gsi, refreshed, lhs);
2520
      lacc = lacc->next_sibling;
2521
    }
2522
  while (lacc);
2523
}
2524
 
2525
/* Modify assignments with a CONSTRUCTOR on their RHS.  STMT contains a pointer
2526
   to the assignment and GSI is the statement iterator pointing at it.  Returns
2527
   the same values as sra_modify_assign.  */
2528
 
2529
static enum scan_assign_result
2530
sra_modify_constructor_assign (gimple *stmt, gimple_stmt_iterator *gsi)
2531
{
2532
  tree lhs = gimple_assign_lhs (*stmt);
2533
  struct access *acc;
2534
 
2535
  acc = get_access_for_expr (lhs);
2536
  if (!acc)
2537
    return SRA_SA_NONE;
2538
 
2539
  if (VEC_length (constructor_elt,
2540
                  CONSTRUCTOR_ELTS (gimple_assign_rhs1 (*stmt))) > 0)
2541
    {
2542
      /* I have never seen this code path trigger but if it can happen the
2543
         following should handle it gracefully.  */
2544
      if (access_has_children_p (acc))
2545
        generate_subtree_copies (acc->first_child, acc->base, 0, 0, 0, gsi,
2546
                                 true, true);
2547
      return SRA_SA_PROCESSED;
2548
    }
2549
 
2550
  if (acc->grp_covered)
2551
    {
2552
      init_subtree_with_zero (acc, gsi, false);
2553
      unlink_stmt_vdef (*stmt);
2554
      gsi_remove (gsi, true);
2555
      return SRA_SA_REMOVED;
2556
    }
2557
  else
2558
    {
2559
      init_subtree_with_zero (acc, gsi, true);
2560
      return SRA_SA_PROCESSED;
2561
    }
2562
}
2563
 
2564
/* Create a new suitable default definition SSA_NAME and replace all uses of
2565
   SSA with it.  RACC is the access describing the uninitialized part of an
2566
   aggregate that is being loaded.  */
2567
 
2568
static void
2569
replace_uses_with_default_def_ssa_name (tree ssa, struct access *racc)
2570
{
2571
  tree repl, decl;
2572
 
2573
  decl = get_unrenamed_access_replacement (racc);
2574
 
2575
  repl = gimple_default_def (cfun, decl);
2576
  if (!repl)
2577
    {
2578
      repl = make_ssa_name (decl, gimple_build_nop ());
2579
      set_default_def (decl, repl);
2580
    }
2581
 
2582
  replace_uses_by (ssa, repl);
2583
}
2584
 
2585
/* Callback of scan_function to process assign statements.  It examines both
2586
   sides of the statement, replaces them with a scalar replacement if there is
2587
   one and generates copies of replacements if scalarized aggregates have been
2588
   used in the assignment.  STMT is a pointer to the assign statement, GSI is
2589
   used to hold generated statements for type conversions and subtree
2590
   copying.  */
2591
 
2592
static enum scan_assign_result
2593
sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi,
2594
                   void *data ATTRIBUTE_UNUSED)
2595
{
2596
  struct access *lacc, *racc;
2597
  tree lhs, rhs;
2598
  bool modify_this_stmt = false;
2599
  bool force_gimple_rhs = false;
2600
  location_t loc = gimple_location (*stmt);
2601
  gimple_stmt_iterator orig_gsi = *gsi;
2602
 
2603
  if (!gimple_assign_single_p (*stmt))
2604
    return SRA_SA_NONE;
2605
  lhs = gimple_assign_lhs (*stmt);
2606
  rhs = gimple_assign_rhs1 (*stmt);
2607
 
2608
  if (TREE_CODE (rhs) == CONSTRUCTOR)
2609
    return sra_modify_constructor_assign (stmt, gsi);
2610
 
2611
  if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
2612
      || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
2613
      || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
2614
    {
2615
      modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (*stmt),
2616
                                          gsi, false, data);
2617
      modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (*stmt),
2618
                                           gsi, true, data);
2619
      return modify_this_stmt ? SRA_SA_PROCESSED : SRA_SA_NONE;
2620
    }
2621
 
2622
  lacc = get_access_for_expr (lhs);
2623
  racc = get_access_for_expr (rhs);
2624
  if (!lacc && !racc)
2625
    return SRA_SA_NONE;
2626
 
2627
  if (lacc && lacc->grp_to_be_replaced)
2628
    {
2629
      lhs = get_access_replacement (lacc);
2630
      gimple_assign_set_lhs (*stmt, lhs);
2631
      modify_this_stmt = true;
2632
      if (lacc->grp_partial_lhs)
2633
        force_gimple_rhs = true;
2634
      sra_stats.exprs++;
2635
    }
2636
 
2637
  if (racc && racc->grp_to_be_replaced)
2638
    {
2639
      rhs = get_access_replacement (racc);
2640
      modify_this_stmt = true;
2641
      if (racc->grp_partial_lhs)
2642
        force_gimple_rhs = true;
2643
      sra_stats.exprs++;
2644
    }
2645
 
2646
  if (modify_this_stmt)
2647
    {
2648
      if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
2649
        {
2650
          /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
2651
             ???  This should move to fold_stmt which we simply should
2652
             call after building a VIEW_CONVERT_EXPR here.  */
2653
          if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
2654
              && !access_has_children_p (lacc))
2655
            {
2656
              tree expr = lhs;
2657
              if (build_ref_for_offset (&expr, TREE_TYPE (lhs), 0,
2658
                                        TREE_TYPE (rhs), false))
2659
                {
2660
                  lhs = expr;
2661
                  gimple_assign_set_lhs (*stmt, expr);
2662
                }
2663
            }
2664
          else if (AGGREGATE_TYPE_P (TREE_TYPE (rhs))
2665
                   && !access_has_children_p (racc))
2666
            {
2667
              tree expr = rhs;
2668
              if (build_ref_for_offset (&expr, TREE_TYPE (rhs), 0,
2669
                                        TREE_TYPE (lhs), false))
2670
                rhs = expr;
2671
            }
2672
          if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
2673
            {
2674
              rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs), rhs);
2675
              if (is_gimple_reg_type (TREE_TYPE (lhs))
2676
                  && TREE_CODE (lhs) != SSA_NAME)
2677
                force_gimple_rhs = true;
2678
            }
2679
        }
2680
    }
2681
 
2682
  /* From this point on, the function deals with assignments in between
2683
     aggregates when at least one has scalar reductions of some of its
2684
     components.  There are three possible scenarios: 1) both the LHS and RHS have
2685
     to-be-scalarized components, 2) only the RHS has or 3) only the LHS has.
2686
 
2687
     In the first case, we would like to load the LHS components from RHS
2688
     components whenever possible.  If that is not possible, we would like to
2689
     read it directly from the RHS (after updating it by storing in it its own
2690
     components).  If there are some necessary unscalarized data in the LHS,
2691
     those will be loaded by the original assignment too.  If neither of these
2692
     cases happen, the original statement can be removed.  Most of this is done
2693
     by load_assign_lhs_subreplacements.
2694
 
2695
     In the second case, we would like to store all RHS scalarized components
2696
     directly into LHS and if they cover the aggregate completely, remove the
2697
     statement too.  In the third case, we want the LHS components to be loaded
2698
     directly from the RHS (DSE will remove the original statement if it
2699
     becomes redundant).
2700
 
2701
     This is a bit complex but manageable when types match and when unions do
2702
     not cause confusion in a way that we cannot really load a component of LHS
2703
     from the RHS or vice versa (the access representing this level can have
2704
     subaccesses that are accessible only through a different union field at a
2705
     higher level - different from the one used in the examined expression).
2706
     Unions are fun.
2707
 
2708
     Therefore, I specially handle a fourth case, happening when there is a
2709
     specific type cast or it is impossible to locate a scalarized subaccess on
2710
     the other side of the expression.  If that happens, I simply "refresh" the
2711
     RHS by storing in it its scalarized components, leave the original statement
2712
     there to do the copying and then load the scalar replacements of the LHS.
2713
     This is what the first branch does.  */
2714
 
2715
  if (gimple_has_volatile_ops (*stmt)
2716
      || contains_view_convert_expr_p (rhs)
2717
      || contains_view_convert_expr_p (lhs)
2718
      || (access_has_children_p (racc)
2719
          && !ref_expr_for_all_replacements_p (racc, lhs, racc->offset))
2720
      || (access_has_children_p (lacc)
2721
          && !ref_expr_for_all_replacements_p (lacc, rhs, lacc->offset)))
2722
    {
2723
      if (access_has_children_p (racc))
2724
        generate_subtree_copies (racc->first_child, racc->base, 0, 0, 0,
2725
                                 gsi, false, false);
2726
      if (access_has_children_p (lacc))
2727
        generate_subtree_copies (lacc->first_child, lacc->base, 0, 0, 0,
2728
                                 gsi, true, true);
2729
      sra_stats.separate_lhs_rhs_handling++;
2730
    }
2731
  else
2732
    {
2733
      if (access_has_children_p (lacc) && access_has_children_p (racc))
2734
        {
2735
          gimple_stmt_iterator orig_gsi = *gsi;
2736
          enum unscalarized_data_handling refreshed;
2737
 
2738
          if (lacc->grp_read && !lacc->grp_covered)
2739
            refreshed = handle_unscalarized_data_in_subtree (racc, lhs, gsi);
2740
          else
2741
            refreshed = SRA_UDH_NONE;
2742
 
2743
          load_assign_lhs_subreplacements (lacc->first_child, racc,
2744
                                           lacc->offset, racc->offset,
2745
                                           &orig_gsi, gsi, &refreshed, lhs);
2746
          if (refreshed != SRA_UDH_RIGHT)
2747
            {
2748
              gsi_next (gsi);
2749
              unlink_stmt_vdef (*stmt);
2750
              gsi_remove (&orig_gsi, true);
2751
              sra_stats.deleted++;
2752
              return SRA_SA_REMOVED;
2753
            }
2754
        }
2755
      else
2756
        {
2757
          if (racc)
2758
            {
2759
              if (!racc->grp_to_be_replaced && !racc->grp_unscalarized_data)
2760
                {
2761
                  if (racc->first_child)
2762
                    generate_subtree_copies (racc->first_child, lhs,
2763
                                             racc->offset, 0, 0, gsi,
2764
                                             false, false);
2765
                  gcc_assert (*stmt == gsi_stmt (*gsi));
2766
                  if (TREE_CODE (lhs) == SSA_NAME)
2767
                    replace_uses_with_default_def_ssa_name (lhs, racc);
2768
 
2769
                  unlink_stmt_vdef (*stmt);
2770
                  gsi_remove (gsi, true);
2771
                  sra_stats.deleted++;
2772
                  return SRA_SA_REMOVED;
2773
                }
2774
              else if (racc->first_child)
2775
                generate_subtree_copies (racc->first_child, lhs,
2776
                                         racc->offset, 0, 0, gsi, false, true);
2777
            }
2778
          if (access_has_children_p (lacc))
2779
            generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
2780
                                     0, 0, gsi, true, true);
2781
        }
2782
    }
2783
 
2784
  /* This gimplification must be done after generate_subtree_copies, lest we
2785
     insert the subtree copies in the middle of the gimplified sequence.  */
2786
  if (force_gimple_rhs)
2787
    rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
2788
                                    true, GSI_SAME_STMT);
2789
  if (gimple_assign_rhs1 (*stmt) != rhs)
2790
    {
2791
      gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
2792
      gcc_assert (*stmt == gsi_stmt (orig_gsi));
2793
    }
2794
 
2795
  return modify_this_stmt ? SRA_SA_PROCESSED : SRA_SA_NONE;
2796
}
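/* A worked sketch of the case analysis above (D, S and the SR_* names are
   invented): for D = S with both sides scalarized into .A and .B components,
   the pass emits

     SR_D_a = SR_S_a;
     SR_D_b = SR_S_b;

   and removes the aggregate copy once nothing unscalarized remains.  If only
   S is scalarized and its replacements cover it completely, they are stored
   straight into D and the statement is removed as well; in the problematic
   cases described above the original copy is kept and the replacements are
   merely refreshed around it.  */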
2797
 
2798
/* Generate statements initializing scalar replacements of parts of function
2799
   parameters.  */
2800
 
2801
static void
2802
initialize_parameter_reductions (void)
2803
{
2804
  gimple_stmt_iterator gsi;
2805
  gimple_seq seq = NULL;
2806
  tree parm;
2807
 
2808
  for (parm = DECL_ARGUMENTS (current_function_decl);
2809
       parm;
2810
       parm = TREE_CHAIN (parm))
2811
    {
2812
      VEC (access_p, heap) *access_vec;
2813
      struct access *access;
2814
 
2815
      if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
2816
        continue;
2817
      access_vec = get_base_access_vector (parm);
2818
      if (!access_vec)
2819
        continue;
2820
 
2821
      if (!seq)
2822
        {
2823
          seq = gimple_seq_alloc ();
2824
          gsi = gsi_start (seq);
2825
        }
2826
 
2827
      for (access = VEC_index (access_p, access_vec, 0);
2828
           access;
2829
           access = access->next_grp)
2830
        generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true);
2831
    }
2832
 
2833
  if (seq)
2834
    gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR), seq);
2835
}
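/* In effect (illustrative names): for a scalarized parameter P with
   replacements SR_a and SR_b, the sequence

     SR_a = P.A;
     SR_b = P.B;

   is inserted on the single successor edge of the entry block, so the
   replacements start out holding the incoming argument values.  */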
2836
 
2837
/* The "main" function of intraprocedural SRA passes.  Runs the analysis and if
2838
   it reveals there are components of some aggregates to be scalarized, it runs
2839
   the required transformations.  */
2840
static unsigned int
2841
perform_intra_sra (void)
2842
{
2843
  int ret = 0;
2844
  sra_initialize ();
2845
 
2846
  if (!find_var_candidates ())
2847
    goto out;
2848
 
2849
  if (!scan_function (build_access_from_expr, build_accesses_from_assign, NULL,
2850
                      true, NULL))
2851
    goto out;
2852
 
2853
  if (!analyze_all_variable_accesses ())
2854
    goto out;
2855
 
2856
  scan_function (sra_modify_expr, sra_modify_assign, NULL, false, NULL);
2857
  initialize_parameter_reductions ();
2858
 
2859
  statistics_counter_event (cfun, "Scalar replacements created",
2860
                            sra_stats.replacements);
2861
  statistics_counter_event (cfun, "Modified expressions", sra_stats.exprs);
2862
  statistics_counter_event (cfun, "Subtree copy stmts",
2863
                            sra_stats.subtree_copies);
2864
  statistics_counter_event (cfun, "Subreplacement stmts",
2865
                            sra_stats.subreplacements);
2866
  statistics_counter_event (cfun, "Deleted stmts", sra_stats.deleted);
2867
  statistics_counter_event (cfun, "Separate LHS and RHS handling",
2868
                            sra_stats.separate_lhs_rhs_handling);
2869
 
2870 378 julius
  if (cfg_changed)
2871
    ret = TODO_update_ssa | TODO_cleanup_cfg;
2872
  else
2873
    ret = TODO_update_ssa;
2874 280 jeremybenn
 
2875
 out:
2876
  sra_deinitialize ();
2877
  return ret;
2878
}
2879
 
2880
/* Perform early intraprocedural SRA.  */
2881
static unsigned int
2882
early_intra_sra (void)
2883
{
2884
  sra_mode = SRA_MODE_EARLY_INTRA;
2885
  return perform_intra_sra ();
2886
}
2887
 
2888
/* Perform "late" intraprocedural SRA.  */
2889
static unsigned int
2890
late_intra_sra (void)
2891
{
2892
  sra_mode = SRA_MODE_INTRA;
2893
  return perform_intra_sra ();
2894
}
2895
 
2896
 
2897
static bool
2898
gate_intra_sra (void)
2899
{
2900
  return flag_tree_sra != 0;
2901
}
2902
 
2903
 
2904
struct gimple_opt_pass pass_sra_early =
2905
{
2906
 {
2907
  GIMPLE_PASS,
2908
  "esra",                               /* name */
2909
  gate_intra_sra,                       /* gate */
2910
  early_intra_sra,                      /* execute */
2911
  NULL,                                 /* sub */
2912
  NULL,                                 /* next */
2913
  0,                                     /* static_pass_number */
2914
  TV_TREE_SRA,                          /* tv_id */
2915
  PROP_cfg | PROP_ssa,                  /* properties_required */
2916
  0,                                     /* properties_provided */
2917
  0,                                     /* properties_destroyed */
2918
  0,                                     /* todo_flags_start */
2919
  TODO_dump_func
2920
  | TODO_update_ssa
2921
  | TODO_ggc_collect
2922
  | TODO_verify_ssa                     /* todo_flags_finish */
2923
 }
2924
};
2925
 
2926
struct gimple_opt_pass pass_sra =
2927
{
2928
 {
2929
  GIMPLE_PASS,
2930
  "sra",                                /* name */
2931
  gate_intra_sra,                       /* gate */
2932
  late_intra_sra,                       /* execute */
2933
  NULL,                                 /* sub */
2934
  NULL,                                 /* next */
2935
  0,                                     /* static_pass_number */
2936
  TV_TREE_SRA,                          /* tv_id */
2937
  PROP_cfg | PROP_ssa,                  /* properties_required */
2938
  0,                                     /* properties_provided */
2939
  0,                                     /* properties_destroyed */
2940
  TODO_update_address_taken,            /* todo_flags_start */
2941
  TODO_dump_func
2942
  | TODO_update_ssa
2943
  | TODO_ggc_collect
2944
  | TODO_verify_ssa                     /* todo_flags_finish */
2945
 }
2946
};
2947
 
2948
 
2949
/* Return true iff PARM (which must be a parm_decl) is an unused scalar
2950
   parameter.  */
2951
 
2952
static bool
2953
is_unused_scalar_param (tree parm)
2954
{
2955
  tree name;
2956
  return (is_gimple_reg (parm)
2957
          && (!(name = gimple_default_def (cfun, parm))
2958
              || has_zero_uses (name)));
2959
}
2960
 
2961
/* Scan immediate uses of a default definition SSA name of a parameter PARM and
2962
   examine whether there are any direct or otherwise infeasible ones.  If so,
2963
   return true, otherwise return false.  PARM must be a gimple register with a
2964
   non-NULL default definition.  */
2965
 
2966
static bool
2967
ptr_parm_has_direct_uses (tree parm)
2968
{
2969
  imm_use_iterator ui;
2970
  gimple stmt;
2971
  tree name = gimple_default_def (cfun, parm);
2972
  bool ret = false;
2973
 
2974
  FOR_EACH_IMM_USE_STMT (stmt, ui, name)
2975
    {
2976
      int uses_ok = 0;
2977
      use_operand_p use_p;
2978
 
2979
      if (is_gimple_debug (stmt))
2980
        continue;
2981
 
2982
      /* Valid uses include dereferences on the lhs and the rhs.  */
2983
      if (gimple_has_lhs (stmt))
2984
        {
2985
          tree lhs = gimple_get_lhs (stmt);
2986
          while (handled_component_p (lhs))
2987
            lhs = TREE_OPERAND (lhs, 0);
2988
          if (INDIRECT_REF_P (lhs)
2989
              && TREE_OPERAND (lhs, 0) == name)
2990
            uses_ok++;
2991
        }
2992
      if (gimple_assign_single_p (stmt))
2993
        {
2994
          tree rhs = gimple_assign_rhs1 (stmt);
2995
          while (handled_component_p (rhs))
2996
            rhs = TREE_OPERAND (rhs, 0);
2997
          if (INDIRECT_REF_P (rhs)
2998
              && TREE_OPERAND (rhs, 0) == name)
2999
            uses_ok++;
3000
        }
3001
      else if (is_gimple_call (stmt))
3002
        {
3003
          unsigned i;
3004
          for (i = 0; i < gimple_call_num_args (stmt); ++i)
3005
            {
3006
              tree arg = gimple_call_arg (stmt, i);
3007
              while (handled_component_p (arg))
3008
                arg = TREE_OPERAND (arg, 0);
3009
              if (INDIRECT_REF_P (arg)
3010
                  && TREE_OPERAND (arg, 0) == name)
3011
                uses_ok++;
3012
            }
3013
        }
3014
 
3015
      /* If the number of valid uses does not match the number of
3016
         uses in this stmt there is an unhandled use.  */
3017
      FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
3018
        --uses_ok;
3019
 
3020
      if (uses_ok != 0)
3021
        ret = true;
3022
 
3023
      if (ret)
3024
        BREAK_FROM_IMM_USE_STMT (ui);
3025
    }
3026
 
3027
  return ret;
3028
}
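 
/* Illustrative sketch only, not part of the original source: what the check
   above corresponds to at the source level.  All names below are made up for
   the example and the code is kept out of the build with #if 0.  */
#if 0
extern void consume_int (int);
extern void consume_ptr (int *);

/* Every use of the parameter's default definition is a dereference, so
   ptr_parm_has_direct_uses would return false and the parameter remains an
   IPA-SRA candidate.  */
static int
only_dereferences (int *p)
{
  consume_int (*p);
  return *p + 1;
}

/* Here the pointer value itself escapes into consume_ptr, which counts as a
   direct use and disqualifies the parameter.  */
static int
pointer_escapes (int *p)
{
  consume_ptr (p);
  return *p;
}
#endif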
3029
 
3030
/* Identify candidates for reduction for IPA-SRA based on their type and mark
3031
   them in candidate_bitmap.  Note that these do not necessarily include
3032
   parameters which are unused and thus can be removed.  Return true iff any
3033
   such candidate has been found.  */
3034
 
3035
static bool
3036
find_param_candidates (void)
3037
{
3038
  tree parm;
3039
  int count = 0;
3040
  bool ret = false;
3041
 
3042
  for (parm = DECL_ARGUMENTS (current_function_decl);
3043
       parm;
3044
       parm = TREE_CHAIN (parm))
3045
    {
3046
      tree type = TREE_TYPE (parm);
3047
 
3048
      count++;
3049
 
3050
      if (TREE_THIS_VOLATILE (parm)
3051
          || TREE_ADDRESSABLE (parm)
3052
          || is_va_list_type (type))
3053
        continue;
3054
 
3055
      if (is_unused_scalar_param (parm))
3056
        {
3057
          ret = true;
3058
          continue;
3059
        }
3060
 
3061
      if (POINTER_TYPE_P (type))
3062
        {
3063
          type = TREE_TYPE (type);
3064
 
3065
          if (TREE_CODE (type) == FUNCTION_TYPE
3066
              || TYPE_VOLATILE (type)
3067
              || !is_gimple_reg (parm)
3068
              || is_va_list_type (type)
3069
              || ptr_parm_has_direct_uses (parm))
3070
            continue;
3071
        }
3072
      else if (!AGGREGATE_TYPE_P (type))
3073
        continue;
3074
 
3075
      if (!COMPLETE_TYPE_P (type)
3076
          || !host_integerp (TYPE_SIZE (type), 1)
3077
          || tree_low_cst (TYPE_SIZE (type), 1) == 0
3078
          || (AGGREGATE_TYPE_P (type)
3079
              && type_internals_preclude_sra_p (type)))
3080
        continue;
3081
 
3082
      bitmap_set_bit (candidate_bitmap, DECL_UID (parm));
3083
      ret = true;
3084
      if (dump_file && (dump_flags & TDF_DETAILS))
3085
        {
3086
          fprintf (dump_file, "Candidate (%d): ", DECL_UID (parm));
3087
          print_generic_expr (dump_file, parm, 0);
3088
          fprintf (dump_file, "\n");
3089
        }
3090
    }
3091
 
3092
  func_param_count = count;
3093
  return ret;
3094
}
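 
/* Illustrative sketch only, not part of the original source: roughly which
   kinds of parameters the checks above keep as candidates.  The declaration
   below is made up for the example and is kept out of the build with #if 0.  */
#if 0
struct toy_rec { int a; int b; int c; };

extern void candidates_illustration
  (struct toy_rec by_value,     /* complete aggregate: usually a candidate */
   struct toy_rec *by_ref,      /* pointer to aggregate: candidate as long as
                                   the pointer itself has no direct uses */
   volatile int flags,          /* TREE_THIS_VOLATILE: never a candidate */
   int (*callback) (int),       /* pointer to FUNCTION_TYPE: never a candidate */
   __builtin_va_list ap);       /* va_list: never a candidate */
#endif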
3095
 
3096
/* Callback of walk_aliased_vdefs, marks the access passed as DATA as
3097
   maybe_modified. */
3098
 
3099
static bool
3100
mark_maybe_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
3101
                     void *data)
3102
{
3103
  struct access *repr = (struct access *) data;
3104
 
3105
  repr->grp_maybe_modified = 1;
3106
  return true;
3107
}
3108
 
3109
/* Analyze what representatives (in linked lists accessible from
3110
   REPRESENTATIVES) can be modified by side effects of statements in the
3111
   current function.  */
3112
 
3113
static void
3114
analyze_modified_params (VEC (access_p, heap) *representatives)
3115
{
3116
  int i;
3117
 
3118
  for (i = 0; i < func_param_count; i++)
3119
    {
3120
      struct access *repr;
3121
 
3122
      for (repr = VEC_index (access_p, representatives, i);
3123
           repr;
3124
           repr = repr->next_grp)
3125
        {
3126
          struct access *access;
3127
          bitmap visited;
3128
          ao_ref ar;
3129
 
3130
          if (no_accesses_p (repr))
3131
            continue;
3132
          if (!POINTER_TYPE_P (TREE_TYPE (repr->base))
3133
              || repr->grp_maybe_modified)
3134
            continue;
3135
 
3136
          ao_ref_init (&ar, repr->expr);
3137
          visited = BITMAP_ALLOC (NULL);
3138
          for (access = repr; access; access = access->next_sibling)
3139
            {
3140
              /* All accesses are read ones, otherwise grp_maybe_modified would
3141
                 be trivially set.  */
3142
              walk_aliased_vdefs (&ar, gimple_vuse (access->stmt),
3143
                                  mark_maybe_modified, repr, &visited);
3144
              if (repr->grp_maybe_modified)
3145
                break;
3146
            }
3147
          BITMAP_FREE (visited);
3148
        }
3149
    }
3150
}
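 
/* Illustrative sketch only, not part of the original source: the kind of
   indirect modification the walk above is meant to detect.  Names are made
   up and the code is kept out of the build with #if 0.  */
#if 0
extern void clobber_it (void);

/* Although reader never writes through p itself, clobber_it may store into
   the object p points to, so grp_maybe_modified must be set and the
   parameter cannot simply be converted to pass-by-value.  */
static int
reader (const int *p)
{
  int before = *p;
  clobber_it ();
  return before + *p;
}
#endif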
3151
 
3152
/* Propagate distances in bb_dereferences in the opposite direction to the
3153
   control flow edges, in each step storing the maximum of the current value
3154
   and the minimum of all successors.  These steps are repeated until the table
3155
   stabilizes.  Note that BBs which might terminate the function (according to
3156
   the final_bbs bitmap) are never updated in this way.  */
3157
 
3158
static void
3159
propagate_dereference_distances (void)
3160
{
3161
  VEC (basic_block, heap) *queue;
3162
  basic_block bb;
3163
 
3164
  queue = VEC_alloc (basic_block, heap, last_basic_block_for_function (cfun));
3165
  VEC_quick_push (basic_block, queue, ENTRY_BLOCK_PTR);
3166
  FOR_EACH_BB (bb)
3167
    {
3168
      VEC_quick_push (basic_block, queue, bb);
3169
      bb->aux = bb;
3170
    }
3171
 
3172
  while (!VEC_empty (basic_block, queue))
3173
    {
3174
      edge_iterator ei;
3175
      edge e;
3176
      bool change = false;
3177
      int i;
3178
 
3179
      bb = VEC_pop (basic_block, queue);
3180
      bb->aux = NULL;
3181
 
3182
      if (bitmap_bit_p (final_bbs, bb->index))
3183
        continue;
3184
 
3185
      for (i = 0; i < func_param_count; i++)
3186
        {
3187
          int idx = bb->index * func_param_count + i;
3188
          bool first = true;
3189
          HOST_WIDE_INT inh = 0;
3190
 
3191
          FOR_EACH_EDGE (e, ei, bb->succs)
3192
          {
3193
            int succ_idx = e->dest->index * func_param_count + i;
3194
 
3195
            if (e->src == EXIT_BLOCK_PTR)
3196
              continue;
3197
 
3198
            if (first)
3199
              {
3200
                first = false;
3201
                inh = bb_dereferences [succ_idx];
3202
              }
3203
            else if (bb_dereferences [succ_idx] < inh)
3204
              inh = bb_dereferences [succ_idx];
3205
          }
3206
 
3207
          if (!first && bb_dereferences[idx] < inh)
3208
            {
3209
              bb_dereferences[idx] = inh;
3210
              change = true;
3211
            }
3212
        }
3213
 
3214
      if (change && !bitmap_bit_p (final_bbs, bb->index))
3215
        FOR_EACH_EDGE (e, ei, bb->preds)
3216
          {
3217
            if (e->src->aux)
3218
              continue;
3219
 
3220
            e->src->aux = e->src;
3221
            VEC_quick_push (basic_block, queue, e->src);
3222
          }
3223
    }
3224
 
3225
  VEC_free (basic_block, heap, queue);
3226
}
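 
/* Illustrative sketch only, not part of the original source: the same
   backwards fixed-point computation on a toy CFG kept in plain arrays.  One
   value is tracked per block instead of one per (block, parameter) pair, and
   the worklist is replaced by sweeping until nothing changes.  All names are
   made up and the code is kept out of the build with #if 0.  */
#if 0
#define TOY_NBLOCKS 4

static void
toy_propagate (long dist[TOY_NBLOCKS],
               int succ[TOY_NBLOCKS][TOY_NBLOCKS], int nsucc[TOY_NBLOCKS],
               int is_final[TOY_NBLOCKS])
{
  int changed = 1;

  while (changed)
    {
      int b;

      changed = 0;
      for (b = 0; b < TOY_NBLOCKS; b++)
        {
          long inh;
          int s;

          if (is_final[b] || nsucc[b] == 0)
            continue;

          /* Take the minimum over all successors...  */
          inh = dist[succ[b][0]];
          for (s = 1; s < nsucc[b]; s++)
            if (dist[succ[b][s]] < inh)
              inh = dist[succ[b][s]];

          /* ...and inherit it if it exceeds the current value.  */
          if (dist[b] < inh)
            {
              dist[b] = inh;
              changed = 1;
            }
        }
    }
}
#endif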
3227
 
3228
/* Dump a dereferences TABLE with heading STR to file F.  */
3229
 
3230
static void
3231
dump_dereferences_table (FILE *f, const char *str, HOST_WIDE_INT *table)
3232
{
3233
  basic_block bb;
3234
 
3235
  fprintf (f, "%s", str);
3236
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
3237
    {
3238
      fprintf (f, "%4i  %i   ", bb->index, bitmap_bit_p (final_bbs, bb->index));
3239
      if (bb != EXIT_BLOCK_PTR)
3240
        {
3241
          int i;
3242
          for (i = 0; i < func_param_count; i++)
3243
            {
3244
              int idx = bb->index * func_param_count + i;
3245
              fprintf (f, " %4" HOST_WIDE_INT_PRINT "d", table[idx]);
3246
            }
3247
        }
3248
      fprintf (f, "\n");
3249
    }
3250
  fprintf (dump_file, "\n");
3251
}
3252
 
3253
/* Determine which (parts of) parameters passed by reference that are not
3254
   assigned to are not certainly dereferenced in this function, meaning the
3255
   dereference cannot be safely moved to the caller without potentially
3256
   introducing a segfault.  Mark such REPRESENTATIVES as
3257
   grp_not_necessarilly_dereferenced.
3258
 
3259
   The maximum dereference "distance," i.e. the offset + size of the accessed
3260
   part, is calculated for each pointer parameter rather than a simple
3261
   boolean, in order to handle cases when only a fraction of the whole
3262
   aggregate is allocated (see testsuite/gcc.c-torture/execute/ipa-sra-2.c for
3263
   an example).
3264
 
3265
   The maximum dereference distances for each pointer parameter and BB are
3266
   already stored in bb_dereferences.  This routine simply propagates these
3267
   values upwards by propagate_dereference_distances and then compares the
3268
   distances of individual parameters in the ENTRY BB to the equivalent
3269
   distances of each representative of a (fraction of a) parameter.  */
3270
 
3271
static void
3272
analyze_caller_dereference_legality (VEC (access_p, heap) *representatives)
3273
{
3274
  int i;
3275
 
3276
  if (dump_file && (dump_flags & TDF_DETAILS))
3277
    dump_dereferences_table (dump_file,
3278
                             "Dereference table before propagation:\n",
3279
                             bb_dereferences);
3280
 
3281
  propagate_dereference_distances ();
3282
 
3283
  if (dump_file && (dump_flags & TDF_DETAILS))
3284
    dump_dereferences_table (dump_file,
3285
                             "Dereference table after propagation:\n",
3286
                             bb_dereferences);
3287
 
3288
  for (i = 0; i < func_param_count; i++)
3289
    {
3290
      struct access *repr = VEC_index (access_p, representatives, i);
3291
      int idx = ENTRY_BLOCK_PTR->index * func_param_count + i;
3292
 
3293
      if (!repr || no_accesses_p (repr))
3294
        continue;
3295
 
3296
      do
3297
        {
3298
          if ((repr->offset + repr->size) > bb_dereferences[idx])
3299
            repr->grp_not_necessarilly_dereferenced = 1;
3300
          repr = repr->next_grp;
3301
        }
3302
      while (repr);
3303
    }
3304
}
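 
/* Illustrative sketch only, not part of the original source: why the
   entry-block distance matters.  The functions below are made up and kept
   out of the build with #if 0.  In always_derefs, *p is read on every path,
   so a caller could safely load the value and pass it directly.  In
   sometimes_derefs, *p is read only when the guard holds; hoisting the load
   into a caller that passes a null or dangling pointer together with
   cond == 0 would introduce a fault the original program did not have.  */
#if 0
static int
always_derefs (const int *p)
{
  return *p + 1;
}

static int
sometimes_derefs (const int *p, int cond)
{
  if (cond)
    return *p;
  return 0;
}
#endif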
3305
 
3306
/* Return the representative access for the parameter declaration PARM if it is
3307
   a scalar passed by reference which is not written to and the pointer value
3308
   is not used directly.  Thus, if it is legal to dereference it in the caller
3309
   and we can rule out modifications through aliases, such a parameter should be
3310
   turned into one passed by value.  Return NULL otherwise.  */
3311
 
3312
static struct access *
3313
unmodified_by_ref_scalar_representative (tree parm)
3314
{
3315
  int i, access_count;
3316
  struct access *repr;
3317
  VEC (access_p, heap) *access_vec;
3318
 
3319
  access_vec = get_base_access_vector (parm);
3320
  gcc_assert (access_vec);
3321
  repr = VEC_index (access_p, access_vec, 0);
3322
  if (repr->write)
3323
    return NULL;
3324
  repr->group_representative = repr;
3325
 
3326
  access_count = VEC_length (access_p, access_vec);
3327
  for (i = 1; i < access_count; i++)
3328
    {
3329
      struct access *access = VEC_index (access_p, access_vec, i);
3330
      if (access->write)
3331
        return NULL;
3332
      access->group_representative = repr;
3333
      access->next_sibling = repr->next_sibling;
3334
      repr->next_sibling = access;
3335
    }
3336
 
3337
  repr->grp_read = 1;
3338
  repr->grp_scalar_ptr = 1;
3339
  return repr;
3340
}
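 
/* Illustrative sketch only, not part of the original source: the overall
   effect this representative enables.  The names and the exact shape of the
   clone are made up (the real clone is produced later by modify_function via
   cgraph_function_versioning); the code is kept out of the build with #if 0.  */
#if 0
/* Before: a scalar passed by reference that is only read.  */
static int
before (const int *len_p)
{
  return *len_p * 2;
}

/* After (conceptually): the clone takes the value itself and every call
   site is rewritten from before (&x) to before_clone (x).  */
static int
before_clone (int len)
{
  return len * 2;
}
#endif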
3341
 
3342
/* Return true iff this access precludes IPA-SRA of the parameter it is
3343
   associated with. */
3344
 
3345
static bool
3346
access_precludes_ipa_sra_p (struct access *access)
3347
{
3348
  /* Avoid issues such as the second simple testcase in PR 42025.  The problem
3349
     is an incompatible assignment in a call statement (and possibly even in asm
3350
     statements).  This can be relaxed by using a new temporary but only for
3351
     non-TREE_ADDRESSABLE types and is probably not worth the complexity.  (In
3352
     intraprocedural SRA we deal with this by keeping the old aggregate around,
3353
     something we cannot do in IPA-SRA.)  */
3354
  if (access->write
3355
      && (is_gimple_call (access->stmt)
3356
          || gimple_code (access->stmt) == GIMPLE_ASM))
3357
    return true;
3358
 
3359
  return false;
3360
}
3361
 
3362
 
3363
/* Sort collected accesses for parameter PARM, identify representatives for
3364
   each accessed region and link them together.  Return NULL if there are
3365
   different but overlapping accesses, return the special pointer value meaning
3366
   there are no accesses for this parameter if that is the case, and return the
3367
   first representative otherwise.  Set *RO_GRP if there is a group of accesses
3368
   with only read (i.e. no write) accesses.  */
3369
 
3370
static struct access *
3371
splice_param_accesses (tree parm, bool *ro_grp)
3372
{
3373
  int i, j, access_count, group_count;
3374
  int agg_size, total_size = 0;
3375
  struct access *access, *res, **prev_acc_ptr = &res;
3376
  VEC (access_p, heap) *access_vec;
3377
 
3378
  access_vec = get_base_access_vector (parm);
3379
  if (!access_vec)
3380
    return &no_accesses_representant;
3381
  access_count = VEC_length (access_p, access_vec);
3382
 
3383
  qsort (VEC_address (access_p, access_vec), access_count, sizeof (access_p),
3384
         compare_access_positions);
3385
 
3386
  i = 0;
3387
  total_size = 0;
3388
  group_count = 0;
3389
  while (i < access_count)
3390
    {
3391
      bool modification;
3392
      access = VEC_index (access_p, access_vec, i);
3393
      modification = access->write;
3394
      if (access_precludes_ipa_sra_p (access))
3395
        return NULL;
3396
 
3397
      /* Access is about to become group representative unless we find some
3398
         nasty overlap which would preclude us from breaking this parameter
3399
         apart. */
3400
 
3401
      j = i + 1;
3402
      while (j < access_count)
3403
        {
3404
          struct access *ac2 = VEC_index (access_p, access_vec, j);
3405
          if (ac2->offset != access->offset)
3406
            {
3407
              /* All or nothing law for parameters. */
3408
              if (access->offset + access->size > ac2->offset)
3409
                return NULL;
3410
              else
3411
                break;
3412
            }
3413
          else if (ac2->size != access->size)
3414
            return NULL;
3415
 
3416
          if (access_precludes_ipa_sra_p (ac2))
3417
            return NULL;
3418
 
3419
          modification |= ac2->write;
3420
          ac2->group_representative = access;
3421
          ac2->next_sibling = access->next_sibling;
3422
          access->next_sibling = ac2;
3423
          j++;
3424
        }
3425
 
3426
      group_count++;
3427
      access->grp_maybe_modified = modification;
3428
      if (!modification)
3429
        *ro_grp = true;
3430
      *prev_acc_ptr = access;
3431
      prev_acc_ptr = &access->next_grp;
3432
      total_size += access->size;
3433
      i = j;
3434
    }
3435
 
3436
  if (POINTER_TYPE_P (TREE_TYPE (parm)))
3437
    agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
3438
  else
3439
    agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
3440
  if (total_size >= agg_size)
3441
    return NULL;
3442
 
3443
  gcc_assert (group_count > 0);
3444
  return res;
3445
}
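 
/* Illustrative sketch only, not part of the original source: the "all or
   nothing" rule above in source terms.  The types and functions below are
   made up and kept out of the build with #if 0.  */
#if 0
struct toy_inner { int a; int b; };
struct toy_outer { struct toy_inner in; int c; };

extern void use_int (int);
extern void use_inner (struct toy_inner);

/* Two disjoint regions (in.a and c) out of a larger aggregate: each gets its
   own representative and the parameter can still be split.  */
static void
splittable (struct toy_outer *p)
{
  use_int (p->in.a);
  use_int (p->c);
}

/* p->in overlaps p->in.a without being the same region (same offset,
   different size), so splice_param_accesses gives up and returns NULL.  */
static void
not_splittable (struct toy_outer *p)
{
  use_int (p->in.a);
  use_inner (p->in);
}
#endif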
3446
 
3447
/* Decide whether parameters with representative accesses given by REPR should
3448
   be reduced into components.  */
3449
 
3450
static int
3451
decide_one_param_reduction (struct access *repr)
3452
{
3453
  int total_size, cur_parm_size, agg_size, new_param_count, parm_size_limit;
3454
  bool by_ref;
3455
  tree parm;
3456
 
3457
  parm = repr->base;
3458
  cur_parm_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
3459
  gcc_assert (cur_parm_size > 0);
3460
 
3461
  if (POINTER_TYPE_P (TREE_TYPE (parm)))
3462
    {
3463
      by_ref = true;
3464
      agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
3465
    }
3466
  else
3467
    {
3468
      by_ref = false;
3469
      agg_size = cur_parm_size;
3470
    }
3471
 
3472
  if (dump_file)
3473
    {
3474
      struct access *acc;
3475
      fprintf (dump_file, "Evaluating PARAM group sizes for ");
3476
      print_generic_expr (dump_file, parm, 0);
3477
      fprintf (dump_file, " (UID: %u): \n", DECL_UID (parm));
3478
      for (acc = repr; acc; acc = acc->next_grp)
3479
        dump_access (dump_file, acc, true);
3480
    }
3481
 
3482
  total_size = 0;
3483
  new_param_count = 0;
3484
 
3485
  for (; repr; repr = repr->next_grp)
3486
    {
3487
      gcc_assert (parm == repr->base);
3488
      new_param_count++;
3489
 
3490
      if (!by_ref || (!repr->grp_maybe_modified
3491
                      && !repr->grp_not_necessarilly_dereferenced))
3492
        total_size += repr->size;
3493
      else
3494
        total_size += cur_parm_size;
3495
    }
3496
 
3497
  gcc_assert (new_param_count > 0);
3498
 
3499
  if (optimize_function_for_size_p (cfun))
3500
    parm_size_limit = cur_parm_size;
3501
  else
3502
    parm_size_limit = (PARAM_VALUE (PARAM_IPA_SRA_PTR_GROWTH_FACTOR)
3503
                       * cur_parm_size);
3504
 
3505
  if (total_size < agg_size
3506
      && total_size <= parm_size_limit)
3507
    {
3508
      if (dump_file)
3509
        fprintf (dump_file, "    ....will be split into %i components\n",
3510
                 new_param_count);
3511
      return new_param_count;
3512
    }
3513
  else
3514
    return 0;
3515
}
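 
/* Illustrative numbers only, not part of the original source: suppose a
   by-reference parameter points to a 256-bit aggregate (agg_size == 256) and
   the pointer itself is 64 bits wide (cur_parm_size == 64).  Two 32-bit
   fields are read, neither maybe-modified nor possibly not dereferenced, so
   total_size == 64 and new_param_count == 2.  With
   PARAM_IPA_SRA_PTR_GROWTH_FACTOR equal to 2, parm_size_limit == 128; since
   64 < 256 and 64 <= 128, the parameter is split into two components.  Had
   one of the groups been maybe-modified, it would have contributed
   cur_parm_size (64) instead of 32, and had the accessed regions covered the
   whole aggregate, total_size < agg_size would fail and nothing would be
   split.  */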
3516
 
3517
/* The order of the following enums is important; we need to do extra work for
3518
   UNUSED_PARAMS, BY_VAL_ACCESSES and UNMODIF_BY_REF_ACCESSES.  */
3519
enum ipa_splicing_result { NO_GOOD_ACCESS, UNUSED_PARAMS, BY_VAL_ACCESSES,
3520
                          MODIF_BY_REF_ACCESSES, UNMODIF_BY_REF_ACCESSES };
3521
 
3522
/* Identify representatives of all accesses to all candidate parameters for
3523
   IPA-SRA.  Return result based on what representatives have been found. */
3524
 
3525
static enum ipa_splicing_result
3526
splice_all_param_accesses (VEC (access_p, heap) **representatives)
3527
{
3528
  enum ipa_splicing_result result = NO_GOOD_ACCESS;
3529
  tree parm;
3530
  struct access *repr;
3531
 
3532
  *representatives = VEC_alloc (access_p, heap, func_param_count);
3533
 
3534
  for (parm = DECL_ARGUMENTS (current_function_decl);
3535
       parm;
3536
       parm = TREE_CHAIN (parm))
3537
    {
3538
      if (is_unused_scalar_param (parm))
3539
        {
3540
          VEC_quick_push (access_p, *representatives,
3541
                          &no_accesses_representant);
3542
          if (result == NO_GOOD_ACCESS)
3543
            result = UNUSED_PARAMS;
3544
        }
3545
      else if (POINTER_TYPE_P (TREE_TYPE (parm))
3546
               && is_gimple_reg_type (TREE_TYPE (TREE_TYPE (parm)))
3547
               && bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3548
        {
3549
          repr = unmodified_by_ref_scalar_representative (parm);
3550
          VEC_quick_push (access_p, *representatives, repr);
3551
          if (repr)
3552
            result = UNMODIF_BY_REF_ACCESSES;
3553
        }
3554
      else if (bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3555
        {
3556
          bool ro_grp = false;
3557
          repr = splice_param_accesses (parm, &ro_grp);
3558
          VEC_quick_push (access_p, *representatives, repr);
3559
 
3560
          if (repr && !no_accesses_p (repr))
3561
            {
3562
              if (POINTER_TYPE_P (TREE_TYPE (parm)))
3563
                {
3564
                  if (ro_grp)
3565
                    result = UNMODIF_BY_REF_ACCESSES;
3566
                  else if (result < MODIF_BY_REF_ACCESSES)
3567
                    result = MODIF_BY_REF_ACCESSES;
3568
                }
3569
              else if (result < BY_VAL_ACCESSES)
3570
                result = BY_VAL_ACCESSES;
3571
            }
3572
          else if (no_accesses_p (repr) && (result == NO_GOOD_ACCESS))
3573
            result = UNUSED_PARAMS;
3574
        }
3575
      else
3576
        VEC_quick_push (access_p, *representatives, NULL);
3577
    }
3578
 
3579
  if (result == NO_GOOD_ACCESS)
3580
    {
3581
      VEC_free (access_p, heap, *representatives);
3582
      *representatives = NULL;
3583
      return NO_GOOD_ACCESS;
3584
    }
3585
 
3586
  return result;
3587
}
3588
 
3589
/* Return the index of BASE in PARMS.  Abort if it is not found.  */
3590
 
3591
static inline int
3592
get_param_index (tree base, VEC(tree, heap) *parms)
3593
{
3594
  int i, len;
3595
 
3596
  len = VEC_length (tree, parms);
3597
  for (i = 0; i < len; i++)
3598
    if (VEC_index (tree, parms, i) == base)
3599
      return i;
3600
  gcc_unreachable ();
3601
}
3602
 
3603
/* Convert the decisions made at the representative level into compact
3604
   parameter adjustments.  REPRESENTATIVES are pointers to the first
3605
   representatives of each parameter's accesses, ADJUSTMENTS_COUNT is the
3606
   expected final number of adjustments.  */
3607
 
3608
static ipa_parm_adjustment_vec
3609
turn_representatives_into_adjustments (VEC (access_p, heap) *representatives,
3610
                                       int adjustments_count)
3611
{
3612
  VEC (tree, heap) *parms;
3613
  ipa_parm_adjustment_vec adjustments;
3614
  tree parm;
3615
  int i;
3616
 
3617
  gcc_assert (adjustments_count > 0);
3618
  parms = ipa_get_vector_of_formal_parms (current_function_decl);
3619
  adjustments = VEC_alloc (ipa_parm_adjustment_t, heap, adjustments_count);
3620
  parm = DECL_ARGUMENTS (current_function_decl);
3621
  for (i = 0; i < func_param_count; i++, parm = TREE_CHAIN (parm))
3622
    {
3623
      struct access *repr = VEC_index (access_p, representatives, i);
3624
 
3625
      if (!repr || no_accesses_p (repr))
3626
        {
3627
          struct ipa_parm_adjustment *adj;
3628
 
3629
          adj = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
3630
          memset (adj, 0, sizeof (*adj));
3631
          adj->base_index = get_param_index (parm, parms);
3632
          adj->base = parm;
3633
          if (!repr)
3634
            adj->copy_param = 1;
3635
          else
3636
            adj->remove_param = 1;
3637
        }
3638
      else
3639
        {
3640
          struct ipa_parm_adjustment *adj;
3641
          int index = get_param_index (parm, parms);
3642
 
3643
          for (; repr; repr = repr->next_grp)
3644
            {
3645
              adj = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
3646
              memset (adj, 0, sizeof (*adj));
3647
              gcc_assert (repr->base == parm);
3648
              adj->base_index = index;
3649
              adj->base = repr->base;
3650
              adj->type = repr->type;
3651
              adj->offset = repr->offset;
3652
              adj->by_ref = (POINTER_TYPE_P (TREE_TYPE (repr->base))
3653
                             && (repr->grp_maybe_modified
3654
                                 || repr->grp_not_necessarilly_dereferenced));
3655
 
3656
            }
3657
        }
3658
    }
3659
  VEC_free (tree, heap, parms);
3660
  return adjustments;
3661
}
3662
 
3663
/* Analyze the collected accesses and produce a plan of what to do with the
3664
   parameters in the form of adjustments, NULL meaning no changes.  */
3665
 
3666
static ipa_parm_adjustment_vec
3667
analyze_all_param_acesses (void)
3668
{
3669
  enum ipa_splicing_result repr_state;
3670
  bool proceed = false;
3671
  int i, adjustments_count = 0;
3672
  VEC (access_p, heap) *representatives;
3673
  ipa_parm_adjustment_vec adjustments;
3674
 
3675
  repr_state = splice_all_param_accesses (&representatives);
3676
  if (repr_state == NO_GOOD_ACCESS)
3677
    return NULL;
3678
 
3679
  /* If there are any parameters passed by reference which are not modified
3680
     directly, we need to check whether they can be modified indirectly.  */
3681
  if (repr_state == UNMODIF_BY_REF_ACCESSES)
3682
    {
3683
      analyze_caller_dereference_legality (representatives);
3684
      analyze_modified_params (representatives);
3685
    }
3686
 
3687
  for (i = 0; i < func_param_count; i++)
3688
    {
3689
      struct access *repr = VEC_index (access_p, representatives, i);
3690
 
3691
      if (repr && !no_accesses_p (repr))
3692
        {
3693
          if (repr->grp_scalar_ptr)
3694
            {
3695
              adjustments_count++;
3696
              if (repr->grp_not_necessarilly_dereferenced
3697
                  || repr->grp_maybe_modified)
3698
                VEC_replace (access_p, representatives, i, NULL);
3699
              else
3700
                {
3701
                  proceed = true;
3702
                  sra_stats.scalar_by_ref_to_by_val++;
3703
                }
3704
            }
3705
          else
3706
            {
3707
              int new_components = decide_one_param_reduction (repr);
3708
 
3709
              if (new_components == 0)
3710
                {
3711
                  VEC_replace (access_p, representatives, i, NULL);
3712
                  adjustments_count++;
3713
                }
3714
              else
3715
                {
3716
                  adjustments_count += new_components;
3717
                  sra_stats.aggregate_params_reduced++;
3718
                  sra_stats.param_reductions_created += new_components;
3719
                  proceed = true;
3720
                }
3721
            }
3722
        }
3723
      else
3724
        {
3725
          if (no_accesses_p (repr))
3726
            {
3727
              proceed = true;
3728
              sra_stats.deleted_unused_parameters++;
3729
            }
3730
          adjustments_count++;
3731
        }
3732
    }
3733
 
3734
  if (!proceed && dump_file)
3735
    fprintf (dump_file, "NOT proceeding to change params.\n");
3736
 
3737
  if (proceed)
3738
    adjustments = turn_representatives_into_adjustments (representatives,
3739
                                                         adjustments_count);
3740
  else
3741
    adjustments = NULL;
3742
 
3743
  VEC_free (access_p, heap, representatives);
3744
  return adjustments;
3745
}
3746
 
3747
/* If a parameter replacement identified by ADJ does not yet exist in the form
3748
   of a declaration, create it and record it, otherwise return the previously
3749
   created one.  */
3750
 
3751
static tree
3752
get_replaced_param_substitute (struct ipa_parm_adjustment *adj)
3753
{
3754
  tree repl;
3755
  if (!adj->new_ssa_base)
3756
    {
3757
      char *pretty_name = make_fancy_name (adj->base);
3758
 
3759
      repl = create_tmp_var (TREE_TYPE (adj->base), "ISR");
3760
      if (TREE_CODE (TREE_TYPE (repl)) == COMPLEX_TYPE
3761
          || TREE_CODE (TREE_TYPE (repl)) == VECTOR_TYPE)
3762
        DECL_GIMPLE_REG_P (repl) = 1;
3763
      DECL_NAME (repl) = get_identifier (pretty_name);
3764
      obstack_free (&name_obstack, pretty_name);
3765
 
3766
      get_var_ann (repl);
3767
      add_referenced_var (repl);
3768
      adj->new_ssa_base = repl;
3769
    }
3770
  else
3771
    repl = adj->new_ssa_base;
3772
  return repl;
3773
}
3774
 
3775
/* Find the first adjustment for a particular parameter BASE in a vector of
3776
   ADJUSTMENTS which is not a copy_param.  Return NULL if there is no such
3777
   adjustment. */
3778
 
3779
static struct ipa_parm_adjustment *
3780
get_adjustment_for_base (ipa_parm_adjustment_vec adjustments, tree base)
3781
{
3782
  int i, len;
3783
 
3784
  len = VEC_length (ipa_parm_adjustment_t, adjustments);
3785
  for (i = 0; i < len; i++)
3786
    {
3787
      struct ipa_parm_adjustment *adj;
3788
 
3789
      adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
3790
      if (!adj->copy_param && adj->base == base)
3791
        return adj;
3792
    }
3793
 
3794
  return NULL;
3795
}
3796
 
3797
/* Callback for scan_function.  If the statement STMT defines an SSA_NAME of a
3798
   parameter which is to be removed because its value is not used, replace the
3799
   SSA_NAME with one relating to a created VAR_DECL, replace all of its
3800
   uses too, and return true (update_stmt is then issued for the statement by
3801
   the caller).  DATA is a pointer to an adjustments vector.  */
3802
 
3803
static bool
3804
replace_removed_params_ssa_names (gimple stmt, void *data)
3805
{
3806
  VEC (ipa_parm_adjustment_t, heap) *adjustments;
3807
  struct ipa_parm_adjustment *adj;
3808
  tree lhs, decl, repl, name;
3809
 
3810
  adjustments = (VEC (ipa_parm_adjustment_t, heap) *) data;
3811
  if (gimple_code (stmt) == GIMPLE_PHI)
3812
    lhs = gimple_phi_result (stmt);
3813
  else if (is_gimple_assign (stmt))
3814
    lhs = gimple_assign_lhs (stmt);
3815
  else if (is_gimple_call (stmt))
3816
    lhs = gimple_call_lhs (stmt);
3817
  else
3818
    gcc_unreachable ();
3819
 
3820
  if (TREE_CODE (lhs) != SSA_NAME)
3821
    return false;
3822
  decl = SSA_NAME_VAR (lhs);
3823
  if (TREE_CODE (decl) != PARM_DECL)
3824
    return false;
3825
 
3826
  adj = get_adjustment_for_base (adjustments, decl);
3827
  if (!adj)
3828
    return false;
3829
 
3830
  repl = get_replaced_param_substitute (adj);
3831
  name = make_ssa_name (repl, stmt);
3832
 
3833
  if (dump_file)
3834
    {
3835
      fprintf (dump_file, "replacing an SSA name of a removed param ");
3836
      print_generic_expr (dump_file, lhs, 0);
3837
      fprintf (dump_file, " with ");
3838
      print_generic_expr (dump_file, name, 0);
3839
      fprintf (dump_file, "\n");
3840
    }
3841
 
3842
  if (is_gimple_assign (stmt))
3843
    gimple_assign_set_lhs (stmt, name);
3844
  else if (is_gimple_call (stmt))
3845
    gimple_call_set_lhs (stmt, name);
3846
  else
3847
    gimple_phi_set_result (stmt, name);
3848
 
3849
  replace_uses_by (lhs, name);
3850
  release_ssa_name (lhs);
3851
  return true;
3852
}
3853
 
3854
/* Callback for scan_function and helper to sra_ipa_modify_assign.  If the
3855
   expression *EXPR should be replaced by a reduction of a parameter, do so.
3856
   DATA is a pointer to a vector of adjustments.  DONT_CONVERT specifies
3857
   whether the function should care about type incompatibility between the
3858
   current and new expressions.  If it is true, the function will leave
3859
   incompatibility issues to the caller.
3860
 
3861
   When called directly by scan_function, DONT_CONVERT is true when the EXPR is
3862
   a write (LHS) expression.  */
3863
 
3864
static bool
3865
sra_ipa_modify_expr (tree *expr, gimple_stmt_iterator *gsi ATTRIBUTE_UNUSED,
3866
                     bool dont_convert, void *data)
3867
{
3868
  ipa_parm_adjustment_vec adjustments;
3869
  int i, len;
3870
  struct ipa_parm_adjustment *adj, *cand = NULL;
3871
  HOST_WIDE_INT offset, size, max_size;
3872
  tree base, src;
3873
 
3874
  adjustments = (VEC (ipa_parm_adjustment_t, heap) *) data;
3875
  len = VEC_length (ipa_parm_adjustment_t, adjustments);
3876
 
3877
  if (TREE_CODE (*expr) == BIT_FIELD_REF
3878
      || TREE_CODE (*expr) == IMAGPART_EXPR
3879
      || TREE_CODE (*expr) == REALPART_EXPR)
3880
    {
3881
      expr = &TREE_OPERAND (*expr, 0);
3882
      dont_convert = false;
3883
    }
3884
 
3885
  base = get_ref_base_and_extent (*expr, &offset, &size, &max_size);
3886
  if (!base || size == -1 || max_size == -1)
3887
    return false;
3888
 
3889
  if (INDIRECT_REF_P (base))
3890
    base = TREE_OPERAND (base, 0);
3891
 
3892
  base = get_ssa_base_param (base);
3893
  if (!base || TREE_CODE (base) != PARM_DECL)
3894
    return false;
3895
 
3896
  for (i = 0; i < len; i++)
3897
    {
3898
      adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
3899
 
3900
      if (adj->base == base &&
3901
          (adj->offset == offset || adj->remove_param))
3902
        {
3903
          cand = adj;
3904
          break;
3905
        }
3906
    }
3907
  if (!cand || cand->copy_param || cand->remove_param)
3908
    return false;
3909
 
3910
  if (cand->by_ref)
3911
    {
3912
      tree folded;
3913
      src = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (cand->reduction)),
3914
                    cand->reduction);
3915
      folded = gimple_fold_indirect_ref (src);
3916
      if (folded)
3917
        src = folded;
3918
    }
3919
  else
3920
    src = cand->reduction;
3921
 
3922
  if (dump_file && (dump_flags & TDF_DETAILS))
3923
    {
3924
      fprintf (dump_file, "About to replace expr ");
3925
      print_generic_expr (dump_file, *expr, 0);
3926
      fprintf (dump_file, " with ");
3927
      print_generic_expr (dump_file, src, 0);
3928
      fprintf (dump_file, "\n");
3929
    }
3930
 
3931
  if (!dont_convert
3932
      && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
3933
    {
3934
      tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
3935
      *expr = vce;
3936
    }
3937
  else
3938
    *expr = src;
3939
  return true;
3940
}
3941
 
3942
/* Callback for scan_function to process assign statements.  Performs
3943
   essentially the same function as sra_ipa_modify_expr.  */
3944
 
3945
static enum scan_assign_result
3946
sra_ipa_modify_assign (gimple *stmt_ptr, gimple_stmt_iterator *gsi, void *data)
3947
{
3948
  gimple stmt = *stmt_ptr;
3949
  tree *lhs_p, *rhs_p;
3950
  bool any;
3951
 
3952
  if (!gimple_assign_single_p (stmt))
3953
    return SRA_SA_NONE;
3954
 
3955
  rhs_p = gimple_assign_rhs1_ptr (stmt);
3956
  lhs_p = gimple_assign_lhs_ptr (stmt);
3957
 
3958
  any = sra_ipa_modify_expr (rhs_p, gsi, true, data);
3959
  any |= sra_ipa_modify_expr (lhs_p, gsi, true, data);
3960
  if (any)
3961
    {
3962
      tree new_rhs = NULL_TREE;
3963
 
3964
      if (!useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
3965
        {
3966
          if (TREE_CODE (*rhs_p) == CONSTRUCTOR)
3967
            {
3968
              /* V_C_Es of constructors can cause trouble (PR 42714).  */
3969
              if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
3970
                *rhs_p = fold_convert (TREE_TYPE (*lhs_p), integer_zero_node);
3971
              else
3972
                *rhs_p = build_constructor (TREE_TYPE (*lhs_p), 0);
3973
            }
3974
          else
3975
            new_rhs = fold_build1_loc (gimple_location (stmt),
3976
                                       VIEW_CONVERT_EXPR, TREE_TYPE (*lhs_p),
3977
                                       *rhs_p);
3978
        }
3979
      else if (REFERENCE_CLASS_P (*rhs_p)
3980
               && is_gimple_reg_type (TREE_TYPE (*lhs_p))
3981
               && !is_gimple_reg (*lhs_p))
3982
        /* This can happen when an assignment between two single field
3983
           structures is turned into an assignment between two pointers to
3984
           scalars (PR 42237).  */
3985
        new_rhs = *rhs_p;
3986
 
3987
      if (new_rhs)
3988
        {
3989
          tree tmp = force_gimple_operand_gsi (gsi, new_rhs, true, NULL_TREE,
3990
                                               true, GSI_SAME_STMT);
3991
 
3992
          gimple_assign_set_rhs_from_tree (gsi, tmp);
3993
        }
3994
 
3995
      return SRA_SA_PROCESSED;
3996
    }
3997
 
3998
  return SRA_SA_NONE;
3999
}
4000
 
4001
/* Call gimple_debug_bind_reset_value on all debug statements describing
4002
   gimple register parameters that are being removed or replaced.  */
4003
 
4004
static void
4005
sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
4006
{
4007
  int i, len;
4008
 
4009
  len = VEC_length (ipa_parm_adjustment_t, adjustments);
4010
  for (i = 0; i < len; i++)
4011
    {
4012
      struct ipa_parm_adjustment *adj;
4013
      imm_use_iterator ui;
4014
      gimple stmt;
4015
      tree name;
4016
 
4017
      adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
4018
      if (adj->copy_param || !is_gimple_reg (adj->base))
4019
        continue;
4020
      name = gimple_default_def (cfun, adj->base);
4021
      if (!name)
4022
        continue;
4023
      FOR_EACH_IMM_USE_STMT (stmt, ui, name)
4024
        {
4025
          /* All other users must have been removed by scan_function.  */
4026
          gcc_assert (is_gimple_debug (stmt));
4027
          gimple_debug_bind_reset_value (stmt);
4028
          update_stmt (stmt);
4029
        }
4030
    }
4031
}
4032
 
4033
/* Return true iff all callers have at least as many actual arguments as there
4034
   are formal parameters in the current function.  */
4035
 
4036
static bool
4037
all_callers_have_enough_arguments_p (struct cgraph_node *node)
4038
{
4039
  struct cgraph_edge *cs;
4040
  for (cs = node->callers; cs; cs = cs->next_caller)
4041
    if (!callsite_has_enough_arguments_p (cs->call_stmt))
4042
      return false;
4043
 
4044
  return true;
4045
}
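 
/* Illustrative sketch only, not part of the original source: how a caller
   can end up with fewer actual arguments than the callee has formals, which
   is what the check above guards against.  The code below is made up, relies
   on an old-style (unprototyped) declaration, and is kept out of the build
   with #if 0.  */
#if 0
extern int takes_two ();        /* Real definition: int takes_two (int, int);  */

static int
sloppy_caller (void)
{
  return takes_two (1);         /* Only one actual argument is passed.  */
}
#endif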
4046
 
4047
 
4048
/* Convert all callers of NODE to pass parameters as given in ADJUSTMENTS.  */
4049
 
4050
static void
4051
convert_callers (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
4052
{
4053
  tree old_cur_fndecl = current_function_decl;
4054
  struct cgraph_edge *cs;
4055
  bitmap recomputed_callers = BITMAP_ALLOC (NULL);
4056
 
4057
  for (cs = node->callers; cs; cs = cs->next_caller)
4058
    {
4059
      current_function_decl = cs->caller->decl;
4060
      push_cfun (DECL_STRUCT_FUNCTION (cs->caller->decl));
4061
 
4062
      if (dump_file)
4063
        fprintf (dump_file, "Adjusting call (%i -> %i) %s -> %s\n",
4064
                 cs->caller->uid, cs->callee->uid,
4065
                 cgraph_node_name (cs->caller),
4066
                 cgraph_node_name (cs->callee));
4067
 
4068
      ipa_modify_call_arguments (cs, cs->call_stmt, adjustments);
4069
 
4070
      pop_cfun ();
4071
    }
4072
 
4073
  for (cs = node->callers; cs; cs = cs->next_caller)
4074
    if (cs->caller != node
4075
        && !bitmap_bit_p (recomputed_callers, cs->caller->uid))
4076
      {
4077
        compute_inline_parameters (cs->caller);
4078
        bitmap_set_bit (recomputed_callers, cs->caller->uid);
4079
      }
4080
  BITMAP_FREE (recomputed_callers);
4081
 
4082
  current_function_decl = old_cur_fndecl;
4083
  return;
4084
}
4085
 
4086
/* Perform all the modifications required in IPA-SRA for NODE to have parameters
4087
   as given in ADJUSTMENTS.  */
4088
 
4089
static void
4090
modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
4091
{
4092
  struct cgraph_node *new_node;
4093
  struct cgraph_edge *cs;
4094
  VEC (cgraph_edge_p, heap) * redirect_callers;
4095
  int node_callers;
4096
 
4097
  node_callers = 0;
4098
  for (cs = node->callers; cs != NULL; cs = cs->next_caller)
4099
    node_callers++;
4100
  redirect_callers = VEC_alloc (cgraph_edge_p, heap, node_callers);
4101
  for (cs = node->callers; cs != NULL; cs = cs->next_caller)
4102
    VEC_quick_push (cgraph_edge_p, redirect_callers, cs);
4103
 
4104
  rebuild_cgraph_edges ();
4105
  pop_cfun ();
4106
  current_function_decl = NULL_TREE;
4107
 
4108
  new_node = cgraph_function_versioning (node, redirect_callers, NULL, NULL);
4109
  current_function_decl = new_node->decl;
4110
  push_cfun (DECL_STRUCT_FUNCTION (new_node->decl));
4111
 
4112
  ipa_modify_formal_parameters (current_function_decl, adjustments, "ISRA");
4113
  scan_function (sra_ipa_modify_expr, sra_ipa_modify_assign,
4114
                 replace_removed_params_ssa_names, false, adjustments);
4115
  sra_ipa_reset_debug_stmts (adjustments);
4116
  convert_callers (new_node, adjustments);
4117
  cgraph_make_node_local (new_node);
4118
  return;
4119
}
4120
 
4121
/* Return false if the function is apparently unsuitable for IPA-SRA based on
4122
   its attributes, return true otherwise.  NODE is the cgraph node of the
4123
   current function.  */
4124
 
4125
static bool
4126
ipa_sra_preliminary_function_checks (struct cgraph_node *node)
4127
{
4128
  if (!cgraph_node_can_be_local_p (node))
4129
    {
4130
      if (dump_file)
4131
        fprintf (dump_file, "Function not local to this compilation unit.\n");
4132
      return false;
4133
    }
4134
 
4135
  if (!tree_versionable_function_p (node->decl))
4136
    {
4137
      if (dump_file)
4138
        fprintf (dump_file, "Function not local to this compilation unit.\n");
4139
      return false;
4140
    }
4141
 
4142
  if (DECL_VIRTUAL_P (current_function_decl))
4143
    {
4144
      if (dump_file)
4145
        fprintf (dump_file, "Function is a virtual method.\n");
4146
      return false;
4147
    }
4148
 
4149
  if ((DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
4150
      && node->global.size >= MAX_INLINE_INSNS_AUTO)
4151
    {
4152
      if (dump_file)
4153
        fprintf (dump_file, "Function too big to be made truly local.\n");
4154
      return false;
4155
    }
4156
 
4157
  if (!node->callers)
4158
    {
4159
      if (dump_file)
4160
        fprintf (dump_file,
4161
                 "Function has no callers in this compilation unit.\n");
4162
      return false;
4163
    }
4164
 
4165
  if (cfun->stdarg)
4166
    {
4167
      if (dump_file)
4168
        fprintf (dump_file, "Function uses stdarg. \n");
4169
      return false;
4170
    }
4171
 
4172
  if (TYPE_ATTRIBUTES (TREE_TYPE (node->decl)))
4173
    return false;
4174
 
4175
  return true;
4176
}
4177
 
4178
/* Perform early interprocedural SRA.  */
4179
 
4180
static unsigned int
4181
ipa_early_sra (void)
4182
{
4183
  struct cgraph_node *node = cgraph_node (current_function_decl);
4184
  ipa_parm_adjustment_vec adjustments;
4185
  int ret = 0;
4186
 
4187
  if (!ipa_sra_preliminary_function_checks (node))
4188
    return 0;
4189
 
4190
  sra_initialize ();
4191
  sra_mode = SRA_MODE_EARLY_IPA;
4192
 
4193
  if (!find_param_candidates ())
4194
    {
4195
      if (dump_file)
4196
        fprintf (dump_file, "Function has no IPA-SRA candidates.\n");
4197
      goto simple_out;
4198
    }
4199
 
4200
  if (!all_callers_have_enough_arguments_p (node))
4201
    {
4202
      if (dump_file)
4203
        fprintf (dump_file, "There are callers with insufficient number of "
4204
                 "arguments.\n");
4205
      goto simple_out;
4206
    }
4207
 
4208
  bb_dereferences = XCNEWVEC (HOST_WIDE_INT,
4209
                                 func_param_count
4210
                                 * last_basic_block_for_function (cfun));
4211
  final_bbs = BITMAP_ALLOC (NULL);
4212
 
4213
  scan_function (build_access_from_expr, build_accesses_from_assign,
4214
                 NULL, true, NULL);
4215
  if (encountered_apply_args)
4216
    {
4217
      if (dump_file)
4218
        fprintf (dump_file, "Function calls  __builtin_apply_args().\n");
4219
      goto out;
4220
    }
4221
 
4222
  if (encountered_unchangable_recursive_call)
4223
    {
4224
      if (dump_file)
4225
        fprintf (dump_file, "Function calls itself with insufficient "
4226
                 "number of arguments.\n");
4227
      goto out;
4228
    }
4229
 
4230
  adjustments = analyze_all_param_acesses ();
4231
  if (!adjustments)
4232
    goto out;
4233
  if (dump_file)
4234
    ipa_dump_param_adjustments (dump_file, adjustments, current_function_decl);
4235
 
4236
  modify_function (node, adjustments);
4237
  VEC_free (ipa_parm_adjustment_t, heap, adjustments);
4238 378 julius
  if (cfg_changed)
4239
    ret = TODO_update_ssa | TODO_cleanup_cfg;
4240
  else
4241
    ret = TODO_update_ssa;
4242 280 jeremybenn
 
4243
  statistics_counter_event (cfun, "Unused parameters deleted",
4244
                            sra_stats.deleted_unused_parameters);
4245
  statistics_counter_event (cfun, "Scalar parameters converted to by-value",
4246
                            sra_stats.scalar_by_ref_to_by_val);
4247
  statistics_counter_event (cfun, "Aggregate parameters broken up",
4248
                            sra_stats.aggregate_params_reduced);
4249
  statistics_counter_event (cfun, "Aggregate parameter components created",
4250
                            sra_stats.param_reductions_created);
4251
 
4252
 out:
4253
  BITMAP_FREE (final_bbs);
4254
  free (bb_dereferences);
4255
 simple_out:
4256
  sra_deinitialize ();
4257
  return ret;
4258
}
4259
 
4260
/* Return true iff early IPA-SRA shall be performed.  */
4261
static bool
4262
ipa_early_sra_gate (void)
4263
{
4264
  return flag_ipa_sra;
4265
}
4266
 
4267
struct gimple_opt_pass pass_early_ipa_sra =
4268
{
4269
 {
4270
  GIMPLE_PASS,
4271
  "eipa_sra",                           /* name */
4272
  ipa_early_sra_gate,                   /* gate */
4273
  ipa_early_sra,                        /* execute */
4274
  NULL,                                 /* sub */
4275
  NULL,                                 /* next */
4276
  0,                                     /* static_pass_number */
4277
  TV_IPA_SRA,                           /* tv_id */
4278
  0,                                     /* properties_required */
4279
  0,                                     /* properties_provided */
4280
  0,                                     /* properties_destroyed */
4281
  0,                                     /* todo_flags_start */
4282
  TODO_dump_func | TODO_dump_cgraph     /* todo_flags_finish */
4283
 }
4284
};
4285
 
4286
 
