/* Scalar Replacement of Aggregates (SRA) converts some structure
   references into scalar references, exposing them to the scalar
   optimizers.
   Copyright (C) 2008, 2009, 2010 Free Software Foundation, Inc.
   Contributed by Martin Jambor <mjambor@suse.cz>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file implements Scalar Replacement of Aggregates (SRA).  SRA is run
   twice, once in the early stages of compilation (early SRA) and once in the
   late stages (late SRA).  The aim of both is to turn references to scalar
   parts of aggregates into uses of independent scalar variables.

   The two passes are nearly identical; the only difference is that early SRA
   does not scalarize unions which are used as the result in a GIMPLE_RETURN
   statement because together with inlining this can lead to weird type
   conversions.

   Both passes operate in four stages:

   1. The declarations that have properties which make them candidates for
      scalarization are identified in function find_var_candidates().  The
      candidates are stored in candidate_bitmap.

   2. The function body is scanned.  In the process, declarations which are
      used in a manner that prevents their scalarization are removed from the
      candidate bitmap.  More importantly, for every access into an aggregate,
      an access structure (struct access) is created by create_access() and
      stored in a vector associated with the aggregate.  Among other
      information, the aggregate declaration, the offset and size of the access
      and its type are stored in the structure.

      On a related note, assign_link structures are created for every assign
      statement between candidate aggregates and attached to the related
      accesses.

   3. The vectors of accesses are analyzed.  They are first sorted according to
      their offset and size and then scanned for partially overlapping accesses
      (i.e. those which overlap but one is not entirely within another).  Such
      an access disqualifies the whole aggregate from being scalarized.

      If there is no such inhibiting overlap, a representative access structure
      is chosen for every unique combination of offset and size.  Afterwards,
      the pass builds a set of trees from these structures, in which children
      of an access are within their parent (in terms of offset and size).

      Then accesses are propagated whenever possible (i.e. in cases when it
      does not create a partially overlapping access) across assign_links from
      the right hand side to the left hand side.

      Then the set of trees for each declaration is traversed again and those
      accesses which should be replaced by a scalar are identified.

   4. The function is traversed again, and for every reference into an
      aggregate that has some component which is about to be scalarized,
      statements are amended and new statements are created as necessary.
      Finally, if a parameter got scalarized, the scalar replacements are
      initialized with values from respective parameter aggregates.  */

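/* As a purely illustrative sketch (not part of the original sources): given

     struct S { int a; int b; };
     struct S s;
     s.a = i;
     s.b = j;
     res = s.a + s.b;

   the pass replaces the field references with independent scalars, roughly

     int s$a, s$b;
     s$a = i;
     s$b = j;
     res = s$a + s$b;

   after which s itself may become dead; see make_fancy_name below for the
   origin of the "$" names.  */
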
#include "config.h"
75
#include "system.h"
76
#include "coretypes.h"
77
#include "alloc-pool.h"
78
#include "tm.h"
79
#include "tree.h"
80
#include "expr.h"
81
#include "gimple.h"
82
#include "cgraph.h"
83
#include "tree-flow.h"
84
#include "ipa-prop.h"
85
#include "diagnostic.h"
86
#include "statistics.h"
87
#include "tree-dump.h"
88
#include "timevar.h"
89
#include "params.h"
90
#include "target.h"
91
#include "flags.h"
92
#include "tree-inline.h"
93
 
94
/* Enumeration of all aggregate reductions we can do.  */
enum sra_mode { SRA_MODE_EARLY_IPA,   /* early call regularization */
                SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
                SRA_MODE_INTRA };     /* late intraprocedural SRA */

/* Global variable describing which aggregate reduction we are performing at
   the moment.  */
static enum sra_mode sra_mode;

struct assign_link;

/* ACCESS represents each access to an aggregate variable (as a whole or a
   part).  It can also represent a group of accesses that refer to exactly the
   same fragment of an aggregate (i.e. those that have exactly the same offset
   and size).  Such representatives for a single aggregate, once determined,
   are linked in a linked list and have the group fields set.

   Moreover, when doing intraprocedural SRA, a tree is built from those
   representatives (by means of first_child and next_sibling pointers), in
   which all items in a subtree are "within" the root, i.e. their offset is
   greater than or equal to the offset of the root and their offset+size is
   smaller than or equal to the offset+size of the root.  Children of an
   access are sorted by offset.

   Note that accesses to parts of vector and complex number types are always
   represented by an access to the whole complex number or vector.  It is the
   duty of the modifying functions to replace them appropriately.  */

struct access
{
  /* Values returned by `get_ref_base_and_extent' for each component reference.
     If EXPR isn't a component reference, just set `BASE = EXPR', `OFFSET = 0',
     `SIZE = TREE_SIZE (TREE_TYPE (expr))'.  */
  HOST_WIDE_INT offset;
  HOST_WIDE_INT size;
  tree base;

  /* Expression.  It is context dependent so do not use it to create new
     expressions to access the original aggregate.  See PR 42154 for a
     testcase.  */
  tree expr;
  /* Type.  */
  tree type;

  /* The statement this access belongs to.  */
  gimple stmt;

  /* Next group representative for this aggregate.  */
  struct access *next_grp;

  /* Pointer to the group representative.  Pointer to itself if the struct is
     the representative.  */
  struct access *group_representative;

  /* If this access has any children (in terms of the definition above), this
     points to the first one.  */
  struct access *first_child;

  /* In intraprocedural SRA, pointer to the next sibling in the access tree as
     described above.  In IPA-SRA this is a pointer to the next access
     belonging to the same group (having the same representative).  */
  struct access *next_sibling;

  /* Pointers to the first and last element in the linked list of assign
     links.  */
  struct assign_link *first_link, *last_link;

  /* Pointer to the next access in the work queue.  */
  struct access *next_queued;

  /* Replacement variable for this access "region."  Never to be accessed
     directly, always only by means of get_access_replacement() and only
     when grp_to_be_replaced flag is set.  */
  tree replacement_decl;

  /* Is this particular access a write access?  */
  unsigned write : 1;

  /* Is this access an artificial one created to scalarize some record
     entirely?  */
  unsigned total_scalarization : 1;

  /* Is this access currently in the work queue?  */
  unsigned grp_queued : 1;

  /* Does this group contain a write access?  This flag is propagated down the
     access tree.  */
  unsigned grp_write : 1;

  /* Does this group contain a read access?  This flag is propagated down the
     access tree.  */
  unsigned grp_read : 1;

  /* Does this group contain a read access that comes from an assignment
     statement?  This flag is propagated down the access tree.  */
  unsigned grp_assignment_read : 1;

  /* Other passes of the analysis use this bit to make function
     analyze_access_subtree create scalar replacements for this group if
     possible.  */
  unsigned grp_hint : 1;

  /* Is the subtree rooted in this access fully covered by scalar
     replacements?  */
  unsigned grp_covered : 1;

  /* If set to true, this access and all below it in an access tree must not be
     scalarized.  */
  unsigned grp_unscalarizable_region : 1;

  /* Whether data have been written to parts of the aggregate covered by this
     access which are not to be scalarized.  This flag is propagated up in the
     access tree.  */
  unsigned grp_unscalarized_data : 1;

  /* Does this access and/or group contain a write access through a
     BIT_FIELD_REF?  */
  unsigned grp_partial_lhs : 1;

  /* Set when a scalar replacement should be created for this variable.  We
     make the decision and create the replacement at different places because
     create_tmp_var cannot be called from within FOR_EACH_REFERENCED_VAR.  */
  unsigned grp_to_be_replaced : 1;

  /* Is it possible that the group refers to data which might be (directly or
     otherwise) modified?  */
  unsigned grp_maybe_modified : 1;

  /* Set when this is a representative of a pointer to scalar (i.e. by
     reference) parameter which we consider for turning into a plain scalar
     (i.e. a by value parameter).  */
  unsigned grp_scalar_ptr : 1;

  /* Set when we discover that this pointer is not safe to dereference in the
     caller.  */
  unsigned grp_not_necessarilly_dereferenced : 1;
};

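/* An illustrative sketch of the access tree described above (not part of the
   original sources): assuming a 32-bit int, an 8-bit char and no padding,
   for

     struct In { char b; char c; };
     struct S { int a; struct In in; } s;

   with accesses to s as a whole and to each of its scalar parts, the
   representatives could be linked as

     <offset 0, size 48, struct S>      root: the whole of s
       <offset 0, size 32, int>         first_child of the root: s.a
       <offset 32, size 8, char>        next_sibling of s.a: s.in.b
       <offset 40, size 8, char>        next_sibling of s.in.b: s.in.c

   and an access to s.in as a whole would become an intermediate node with
   the two chars as its children.  */
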
typedef struct access *access_p;

DEF_VEC_P (access_p);
DEF_VEC_ALLOC_P (access_p, heap);

/* Alloc pool for allocating access structures.  */
static alloc_pool access_pool;

/* A structure linking lhs and rhs accesses from an aggregate assignment.  They
   are used to propagate subaccesses from rhs to lhs as long as they don't
   conflict with what is already there.  */
struct assign_link
{
  struct access *lacc, *racc;
  struct assign_link *next;
};

/* Alloc pool for allocating assign link structures.  */
static alloc_pool link_pool;

/* Base (tree) -> Vector (VEC(access_p,heap) *) map.  */
static struct pointer_map_t *base_access_vec;

/* Bitmap of candidates.  */
static bitmap candidate_bitmap;

/* Bitmap of candidates which we should try to entirely scalarize away and
   those which cannot be (because they are, and need to be, used as a
   whole).  */
static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;

/* Obstack for creation of fancy names.  */
static struct obstack name_obstack;

/* Head of a linked list of accesses that need to have their subaccesses
   propagated to their assignment counterparts.  */
static struct access *work_queue_head;

/* Number of parameters of the analyzed function when doing early IPA SRA.  */
static int func_param_count;

/* scan_function sets the following to true if it encounters a call to
   __builtin_apply_args.  */
static bool encountered_apply_args;

/* Set by scan_function when it finds a recursive call with fewer actual
   arguments than formal parameters.  */
static bool encountered_unchangable_recursive_call;

/* This is a table in which for each basic block and parameter there is a
   distance (offset + size) in that parameter which is dereferenced and
   accessed in that BB.  */
static HOST_WIDE_INT *bb_dereferences;
/* Bitmap of BBs that can cause the function to "stop" progressing by
   returning, throwing externally, looping infinitely or calling a function
   which might abort etc.  */
static bitmap final_bbs;

/* Representative of no accesses at all.  */
static struct access no_accesses_representant;

/* Predicate to test the special value.  */

static inline bool
no_accesses_p (struct access *access)
{
  return access == &no_accesses_representant;
}

/* Counters tracking the work done by the pass.  */

static struct
{
  /* Number of processed aggregates is readily available in
     analyze_all_variable_accesses and so is not stored here.  */

  /* Number of created scalar replacements.  */
  int replacements;

  /* Number of times sra_modify_expr or sra_modify_assign themselves changed an
     expression.  */
  int exprs;

  /* Number of statements created by generate_subtree_copies.  */
  int subtree_copies;

  /* Number of statements created by load_assign_lhs_subreplacements.  */
  int subreplacements;

  /* Number of times sra_modify_assign has deleted a statement.  */
  int deleted;

  /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
     RHS separately due to type conversions or nonexistent matching
     references.  */
  int separate_lhs_rhs_handling;

  /* Number of parameters that were removed because they were unused.  */
  int deleted_unused_parameters;

  /* Number of scalars passed as parameters by reference that have been
     converted to be passed by value.  */
  int scalar_by_ref_to_by_val;

  /* Number of aggregate parameters that were replaced by one or more of their
     components.  */
  int aggregate_params_reduced;

  /* Number of components created when splitting aggregate parameters.  */
  int param_reductions_created;
} sra_stats;

/* Dump contents of ACCESS to file F in a human friendly way.  If GRP is true,
   representative fields are dumped, otherwise those which only describe the
   individual access are.  */

static void
dump_access (FILE *f, struct access *access, bool grp)
{
  fprintf (f, "access { ");
  fprintf (f, "base = (%d)'", DECL_UID (access->base));
  print_generic_expr (f, access->base, 0);
  fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
  fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
  fprintf (f, ", expr = ");
  print_generic_expr (f, access->expr, 0);
  fprintf (f, ", type = ");
  print_generic_expr (f, access->type, 0);
  if (grp)
    fprintf (f, ", grp_write = %d, total_scalarization = %d, "
             "grp_read = %d, grp_hint = %d, grp_assignment_read = %d, "
             "grp_covered = %d, grp_unscalarizable_region = %d, "
             "grp_unscalarized_data = %d, grp_partial_lhs = %d, "
             "grp_to_be_replaced = %d, grp_maybe_modified = %d, "
             "grp_not_necessarilly_dereferenced = %d\n",
             access->grp_write, access->total_scalarization,
             access->grp_read, access->grp_hint, access->grp_assignment_read,
             access->grp_covered, access->grp_unscalarizable_region,
             access->grp_unscalarized_data, access->grp_partial_lhs,
             access->grp_to_be_replaced, access->grp_maybe_modified,
             access->grp_not_necessarilly_dereferenced);
  else
    fprintf (f, ", write = %d, total_scalarization = %d, "
             "grp_partial_lhs = %d\n",
             access->write, access->total_scalarization,
             access->grp_partial_lhs);
}

/* Dump a subtree rooted in ACCESS to file F, indent by LEVEL.  */

static void
dump_access_tree_1 (FILE *f, struct access *access, int level)
{
  do
    {
      int i;

      for (i = 0; i < level; i++)
        fputs ("* ", f);

      dump_access (f, access, true);

      if (access->first_child)
        dump_access_tree_1 (f, access->first_child, level + 1);

      access = access->next_sibling;
    }
  while (access);
}

/* Dump all access trees for a variable, given the pointer to the first root in
   ACCESS.  */

static void
dump_access_tree (FILE *f, struct access *access)
{
  for (; access; access = access->next_grp)
    dump_access_tree_1 (f, access, 0);
}

/* Return true iff ACC is non-NULL and has subaccesses.  */

static inline bool
access_has_children_p (struct access *acc)
{
  return acc && acc->first_child;
}

/* Return a vector of pointers to accesses for the variable given in BASE or
   NULL if there is none.  */

static VEC (access_p, heap) *
get_base_access_vector (tree base)
{
  void **slot;

  slot = pointer_map_contains (base_access_vec, base);
  if (!slot)
    return NULL;
  else
    return *(VEC (access_p, heap) **) slot;
}

/* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
   in ACCESS.  Return NULL if it cannot be found.  */

static struct access *
find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
                        HOST_WIDE_INT size)
{
  while (access && (access->offset != offset || access->size != size))
    {
      struct access *child = access->first_child;

      while (child && (child->offset + child->size <= offset))
        child = child->next_sibling;
      access = child;
    }

  return access;
}

/* Return the first group representative for BASE or NULL if none exists.  */

static struct access *
get_first_repr_for_decl (tree base)
{
  VEC (access_p, heap) *access_vec;

  access_vec = get_base_access_vector (base);
  if (!access_vec)
    return NULL;

  return VEC_index (access_p, access_vec, 0);
}

/* Find an access representative for the variable BASE and given OFFSET and
   SIZE.  Requires that access trees have already been built.  Return NULL if
   it cannot be found.  */

static struct access *
get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
                                 HOST_WIDE_INT size)
{
  struct access *access;

  access = get_first_repr_for_decl (base);
  while (access && (access->offset + access->size <= offset))
    access = access->next_grp;
  if (!access)
    return NULL;

  return find_access_in_subtree (access, offset, size);
}

/* Add LINK to the linked list of assign links of RACC.  */
static void
add_link_to_rhs (struct access *racc, struct assign_link *link)
{
  gcc_assert (link->racc == racc);

  if (!racc->first_link)
    {
      gcc_assert (!racc->last_link);
      racc->first_link = link;
    }
  else
    racc->last_link->next = link;

  racc->last_link = link;
  link->next = NULL;
}

/* Move all link structures from the linked list of OLD_RACC to the linked
   list of NEW_RACC.  */
static void
relink_to_new_repr (struct access *new_racc, struct access *old_racc)
{
  if (!old_racc->first_link)
    {
      gcc_assert (!old_racc->last_link);
      return;
    }

  if (new_racc->first_link)
    {
      gcc_assert (!new_racc->last_link->next);
      gcc_assert (!old_racc->last_link || !old_racc->last_link->next);

      new_racc->last_link->next = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  else
    {
      gcc_assert (!new_racc->last_link);

      new_racc->first_link = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  old_racc->first_link = old_racc->last_link = NULL;
}

/* Add ACCESS to the work queue (which is actually a stack).  */

static void
add_access_to_work_queue (struct access *access)
{
  if (!access->grp_queued)
    {
      gcc_assert (!access->next_queued);
      access->next_queued = work_queue_head;
      access->grp_queued = 1;
      work_queue_head = access;
    }
}

/* Pop an access from the work queue, and return it, assuming there is one.  */

static struct access *
pop_access_from_work_queue (void)
{
  struct access *access = work_queue_head;

  work_queue_head = access->next_queued;
  access->next_queued = NULL;
  access->grp_queued = 0;
  return access;
}


/* Allocate necessary structures.  */

static void
sra_initialize (void)
{
  candidate_bitmap = BITMAP_ALLOC (NULL);
  should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  gcc_obstack_init (&name_obstack);
  access_pool = create_alloc_pool ("SRA accesses", sizeof (struct access), 16);
  link_pool = create_alloc_pool ("SRA links", sizeof (struct assign_link), 16);
  base_access_vec = pointer_map_create ();
  memset (&sra_stats, 0, sizeof (sra_stats));
  encountered_apply_args = false;
  encountered_unchangable_recursive_call = false;
}

/* Hook fed to pointer_map_traverse, deallocate stored vectors.  */

static bool
delete_base_accesses (const void *key ATTRIBUTE_UNUSED, void **value,
                      void *data ATTRIBUTE_UNUSED)
{
  VEC (access_p, heap) *access_vec;
  access_vec = (VEC (access_p, heap) *) *value;
  VEC_free (access_p, heap, access_vec);

  return true;
}

/* Deallocate all general structures.  */

static void
sra_deinitialize (void)
{
  BITMAP_FREE (candidate_bitmap);
  BITMAP_FREE (should_scalarize_away_bitmap);
  BITMAP_FREE (cannot_scalarize_away_bitmap);
  free_alloc_pool (access_pool);
  free_alloc_pool (link_pool);
  obstack_free (&name_obstack, NULL);

  pointer_map_traverse (base_access_vec, delete_base_accesses, NULL);
  pointer_map_destroy (base_access_vec);
}

/* Remove DECL from candidates for SRA and write REASON to the dump file if
   there is one.  */
static void
disqualify_candidate (tree decl, const char *reason)
{
  bitmap_clear_bit (candidate_bitmap, DECL_UID (decl));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "! Disqualifying ");
      print_generic_expr (dump_file, decl, 0);
      fprintf (dump_file, " - %s\n", reason);
    }
}

/* Return true iff the type contains a field or an element which does not allow
   scalarization.  */

static bool
type_internals_preclude_sra_p (tree type)
{
  tree fld;
  tree et;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      for (fld = TYPE_FIELDS (type); fld; fld = TREE_CHAIN (fld))
        if (TREE_CODE (fld) == FIELD_DECL)
          {
            tree ft = TREE_TYPE (fld);

            if (TREE_THIS_VOLATILE (fld)
                || !DECL_FIELD_OFFSET (fld) || !DECL_SIZE (fld)
                || !host_integerp (DECL_FIELD_OFFSET (fld), 1)
                || !host_integerp (DECL_SIZE (fld), 1))
              return true;

            if (AGGREGATE_TYPE_P (ft)
                && type_internals_preclude_sra_p (ft))
              return true;
          }

      return false;

    case ARRAY_TYPE:
      et = TREE_TYPE (type);

      if (AGGREGATE_TYPE_P (et))
        return type_internals_preclude_sra_p (et);
      else
        return false;

    default:
      return false;
    }
}

/* If T is an SSA_NAME, return NULL if it is not a default def or return its
   base variable if it is.  Return T if it is not an SSA_NAME.  */

static tree
get_ssa_base_param (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (t))
        return SSA_NAME_VAR (t);
      else
        return NULL_TREE;
    }
  return t;
}

/* Mark a dereference of BASE of distance DIST in the basic block that STMT
   belongs to, unless the BB has already been marked as potentially
   final.  */

static void
mark_parm_dereference (tree base, HOST_WIDE_INT dist, gimple stmt)
{
  basic_block bb = gimple_bb (stmt);
  int idx, parm_index = 0;
  tree parm;

  if (bitmap_bit_p (final_bbs, bb->index))
    return;

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm && parm != base;
       parm = TREE_CHAIN (parm))
    parm_index++;

  gcc_assert (parm_index < func_param_count);

  idx = bb->index * func_param_count + parm_index;
  if (bb_dereferences[idx] < dist)
    bb_dereferences[idx] = dist;
}

/* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
   the three fields.  Also add it to the vector of accesses corresponding to
   the base.  Finally, return the new access.  */

static struct access *
create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
{
  VEC (access_p, heap) *vec;
  struct access *access;
  void **slot;

  access = (struct access *) pool_alloc (access_pool);
  memset (access, 0, sizeof (struct access));
  access->base = base;
  access->offset = offset;
  access->size = size;

  slot = pointer_map_contains (base_access_vec, base);
  if (slot)
    vec = (VEC (access_p, heap) *) *slot;
  else
    vec = VEC_alloc (access_p, heap, 32);

  VEC_safe_push (access_p, heap, vec, access);

  *((struct VEC (access_p,heap) **)
        pointer_map_insert (base_access_vec, base)) = vec;

  return access;
}

/* Create and insert an access for EXPR.  Return the created access, or NULL
   if it is not possible.  */

static struct access *
create_access (tree expr, gimple stmt, bool write)
{
  struct access *access;
  HOST_WIDE_INT offset, size, max_size;
  tree base = expr;
  bool ptr, unscalarizable_region = false;

  base = get_ref_base_and_extent (expr, &offset, &size, &max_size);

  if (sra_mode == SRA_MODE_EARLY_IPA && INDIRECT_REF_P (base))
    {
      base = get_ssa_base_param (TREE_OPERAND (base, 0));
      if (!base)
        return NULL;
      ptr = true;
    }
  else
    ptr = false;

  if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
    return NULL;

  if (sra_mode == SRA_MODE_EARLY_IPA)
    {
      if (size < 0 || size != max_size)
        {
          disqualify_candidate (base, "Encountered a variable sized access.");
          return NULL;
        }
      if ((offset % BITS_PER_UNIT) != 0 || (size % BITS_PER_UNIT) != 0)
        {
          disqualify_candidate (base,
                                "Encountered an access not aligned to a byte.");
          return NULL;
        }

      if (ptr)
        mark_parm_dereference (base, offset + size, stmt);
    }
  else
    {
      if (size != max_size)
        {
          size = max_size;
          unscalarizable_region = true;
        }
      if (size < 0)
        {
          disqualify_candidate (base, "Encountered an unconstrained access.");
          return NULL;
        }
    }

  access = create_access_1 (base, offset, size);
  access->expr = expr;
  access->type = TREE_TYPE (expr);
  access->write = write;
  access->grp_unscalarizable_region = unscalarizable_region;
  access->stmt = stmt;

  return access;
}


/* Return true iff TYPE is a RECORD_TYPE with fields that are either of gimple
   register types or (recursively) records with only these two kinds of fields.
   It also returns false if any of these records has a zero-size field as its
   last field.  */

static bool
type_consists_of_records_p (tree type)
{
  tree fld;
  bool last_fld_has_zero_size = false;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (fld = TYPE_FIELDS (type); fld; fld = TREE_CHAIN (fld))
    if (TREE_CODE (fld) == FIELD_DECL)
      {
        tree ft = TREE_TYPE (fld);

        if (!is_gimple_reg_type (ft)
            && !type_consists_of_records_p (ft))
          return false;

        last_fld_has_zero_size = tree_low_cst (DECL_SIZE (fld), 1) == 0;
      }

  if (last_fld_has_zero_size)
    return false;

  return true;
}

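/* For instance (an illustration, not part of the original sources), given

     struct A { int i; double d; };
     struct B { struct A a; long l; };

   the predicate returns true for both A and B, while it returns false for
   any record containing a union member or for a record whose last field has
   zero size.  */
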
/* Create total_scalarization accesses for all scalar-type fields in DECL,
   which must be of a RECORD_TYPE conforming to type_consists_of_records_p.
   BASE must be the top-most VAR_DECL representing the variable, OFFSET must
   be the offset of DECL within BASE.  */

static void
completely_scalarize_record (tree base, tree decl, HOST_WIDE_INT offset)
{
  tree fld, decl_type = TREE_TYPE (decl);

  for (fld = TYPE_FIELDS (decl_type); fld; fld = TREE_CHAIN (fld))
    if (TREE_CODE (fld) == FIELD_DECL)
      {
        HOST_WIDE_INT pos = offset + int_bit_position (fld);
        tree ft = TREE_TYPE (fld);

        if (is_gimple_reg_type (ft))
          {
            struct access *access;
            HOST_WIDE_INT size;
            tree expr;
            bool ok;

            size = tree_low_cst (DECL_SIZE (fld), 1);
            expr = base;
            ok = build_ref_for_offset (&expr, TREE_TYPE (base), pos,
                                       ft, false);
            gcc_assert (ok);

            access = create_access_1 (base, pos, size);
            access->expr = expr;
            access->type = ft;
            access->total_scalarization = 1;
            /* Accesses for intraprocedural SRA can have their stmt NULL.  */
          }
        else
          completely_scalarize_record (base, fld, pos);
      }
}


/* Search the given tree for a declaration by skipping handled components and
   exclude it from the candidates.  */

static void
disqualify_base_of_expr (tree t, const char *reason)
{
  while (handled_component_p (t))
    t = TREE_OPERAND (t, 0);

  if (sra_mode == SRA_MODE_EARLY_IPA)
    {
      if (INDIRECT_REF_P (t))
        t = TREE_OPERAND (t, 0);
      t = get_ssa_base_param (t);
    }

  if (t && DECL_P (t))
    disqualify_candidate (t, reason);
}

/* Scan expression EXPR and create access structures for all accesses to
   candidates for scalarization.  Return the created access or NULL if none is
   created.  */

static struct access *
build_access_from_expr_1 (tree *expr_ptr, gimple stmt, bool write)
{
  struct access *ret = NULL;
  tree expr = *expr_ptr;
  bool partial_ref;

  if (TREE_CODE (expr) == BIT_FIELD_REF
      || TREE_CODE (expr) == IMAGPART_EXPR
      || TREE_CODE (expr) == REALPART_EXPR)
    {
      expr = TREE_OPERAND (expr, 0);
      partial_ref = true;
    }
  else
    partial_ref = false;

  /* We need to dive through V_C_Es in order to get the size of their operand
     and not the result type.  Ada produces such statements.  We are also
     capable of handling the topmost V_C_E but not any of those buried in other
     handled components.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
    expr = TREE_OPERAND (expr, 0);

  if (contains_view_convert_expr_p (expr))
    {
      disqualify_base_of_expr (expr, "V_C_E under a different handled "
                               "component.");
      return NULL;
    }

  switch (TREE_CODE (expr))
    {
    case INDIRECT_REF:
      if (sra_mode != SRA_MODE_EARLY_IPA)
        return NULL;
      /* fall through */
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      ret = create_access (expr, stmt, write);
      break;

    default:
      break;
    }

  if (write && partial_ref && ret)
    ret->grp_partial_lhs = 1;

  return ret;
}

/* Callback of scan_function.  Scan expression EXPR and create access
   structures for all accesses to candidates for scalarization.  Return true if
   any access has been inserted.  */

static bool
build_access_from_expr (tree *expr_ptr,
                        gimple_stmt_iterator *gsi ATTRIBUTE_UNUSED, bool write,
                        void *data ATTRIBUTE_UNUSED)
{
  struct access *access;

  access = build_access_from_expr_1 (expr_ptr, gsi_stmt (*gsi), write);
  if (access)
    {
      /* This means the aggregate is accessed as a whole in a way other than an
         assign statement and thus cannot be removed even if we had a scalar
         replacement for everything.  */
      if (cannot_scalarize_away_bitmap)
        bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
      return true;
    }
  return false;
}

/* Disqualify LHS and RHS for scalarization if STMT must end its basic block in
   modes in which it matters, return true iff they have been disqualified.  RHS
   may be NULL, in that case ignore it.  If we scalarize an aggregate in
   intra-SRA we may need to add statements after each statement.  This is not
   possible if a statement unconditionally has to end the basic block.  */
static bool
disqualify_ops_if_throwing_stmt (gimple stmt, tree lhs, tree rhs)
{
  if ((sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
      && (stmt_can_throw_internal (stmt) || stmt_ends_bb_p (stmt)))
    {
      disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
      if (rhs)
        disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
      return true;
    }
  return false;
}


/* Result code for scan_assign callback for scan_function.  */
enum scan_assign_result { SRA_SA_NONE,       /* nothing done for the stmt */
                          SRA_SA_PROCESSED,  /* stmt analyzed/changed */
                          SRA_SA_REMOVED };  /* stmt redundant and eliminated */


/* Callback of scan_function.  Scan expressions occurring in the statement
   pointed to by STMT_PTR, create access structures for all accesses to
   candidates for scalarization and remove those candidates which occur in
   statements or expressions that prevent them from being split apart.  Return
   true if any access has been inserted.  */

static enum scan_assign_result
build_accesses_from_assign (gimple *stmt_ptr,
                            gimple_stmt_iterator *gsi ATTRIBUTE_UNUSED,
                            void *data ATTRIBUTE_UNUSED)
{
  gimple stmt = *stmt_ptr;
  tree *lhs_ptr, *rhs_ptr;
  struct access *lacc, *racc;

  if (!gimple_assign_single_p (stmt))
    return SRA_SA_NONE;

  lhs_ptr = gimple_assign_lhs_ptr (stmt);
  rhs_ptr = gimple_assign_rhs1_ptr (stmt);

  if (disqualify_ops_if_throwing_stmt (stmt, *lhs_ptr, *rhs_ptr))
    return SRA_SA_NONE;

  racc = build_access_from_expr_1 (rhs_ptr, stmt, false);
  lacc = build_access_from_expr_1 (lhs_ptr, stmt, true);

  if (racc)
    {
      racc->grp_assignment_read = 1;
      if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
          && !is_gimple_reg_type (racc->type))
        bitmap_set_bit (should_scalarize_away_bitmap, DECL_UID (racc->base));
    }

  if (lacc && racc
      && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
      && !lacc->grp_unscalarizable_region
      && !racc->grp_unscalarizable_region
      && AGGREGATE_TYPE_P (TREE_TYPE (*lhs_ptr))
      /* FIXME: Turn the following line into an assert after PR 40058 is
         fixed.  */
      && lacc->size == racc->size
      && useless_type_conversion_p (lacc->type, racc->type))
    {
      struct assign_link *link;

      link = (struct assign_link *) pool_alloc (link_pool);
      memset (link, 0, sizeof (struct assign_link));

      link->lacc = lacc;
      link->racc = racc;

      add_link_to_rhs (racc, link);
    }

  return (lacc || racc) ? SRA_SA_PROCESSED : SRA_SA_NONE;
}

/* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
   GIMPLE_ASM operands with memory constraints which cannot be scalarized.  */

static bool
asm_visit_addr (gimple stmt ATTRIBUTE_UNUSED, tree op,
                void *data ATTRIBUTE_UNUSED)
{
  if (DECL_P (op))
    disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");

  return false;
}

/* Return true iff callsite CALL has at least as many actual arguments as there
   are formal parameters of the function currently processed by IPA-SRA.  */

static inline bool
callsite_has_enough_arguments_p (gimple call)
{
  return gimple_call_num_args (call) >= (unsigned) func_param_count;
}

/* Scan the function and look for interesting statements.  Return true if any
   has been found or processed, as indicated by callbacks.  SCAN_EXPR is a
   callback called on all expressions within statements except assign
   statements and those deemed entirely unsuitable for some reason (all
   operands in such statements and expressions are removed from
   candidate_bitmap).  SCAN_ASSIGN is a callback called on all assign
   statements, HANDLE_SSA_DEFS is a callback called on assign statements and
   those call statements which have a LHS; it can be NULL.  ANALYSIS_STAGE is
   true when running in the analysis stage of a pass and thus no statement is
   being modified.  DATA is a pointer passed to all callbacks.  If any single
   callback returns true, this function also returns true, otherwise it
   returns false.  */

static bool
scan_function (bool (*scan_expr) (tree *, gimple_stmt_iterator *, bool, void *),
               enum scan_assign_result (*scan_assign) (gimple *,
                                                       gimple_stmt_iterator *,
                                                       void *),
               bool (*handle_ssa_defs)(gimple, void *),
               bool analysis_stage, void *data)
{
  gimple_stmt_iterator gsi;
  basic_block bb;
  unsigned i;
  tree *t;
  bool ret = false;

  FOR_EACH_BB (bb)
    {
      bool bb_changed = false;

      if (handle_ssa_defs)
        for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
          ret |= handle_ssa_defs (gsi_stmt (gsi), data);

      gsi = gsi_start_bb (bb);
      while (!gsi_end_p (gsi))
        {
          gimple stmt = gsi_stmt (gsi);
          enum scan_assign_result assign_result;
          bool any = false, deleted = false;

          if (analysis_stage && final_bbs && stmt_can_throw_external (stmt))
            bitmap_set_bit (final_bbs, bb->index);
          switch (gimple_code (stmt))
            {
            case GIMPLE_RETURN:
              t = gimple_return_retval_ptr (stmt);
              if (*t != NULL_TREE)
                any |= scan_expr (t, &gsi, false, data);
              if (analysis_stage && final_bbs)
                bitmap_set_bit (final_bbs, bb->index);
              break;

            case GIMPLE_ASSIGN:
              assign_result = scan_assign (&stmt, &gsi, data);
              any |= assign_result == SRA_SA_PROCESSED;
              deleted = assign_result == SRA_SA_REMOVED;
              if (handle_ssa_defs && assign_result != SRA_SA_REMOVED)
                any |= handle_ssa_defs (stmt, data);
              break;

            case GIMPLE_CALL:
              /* Operands must be processed before the lhs.  */
              for (i = 0; i < gimple_call_num_args (stmt); i++)
                {
                  tree *argp = gimple_call_arg_ptr (stmt, i);
                  any |= scan_expr (argp, &gsi, false, data);
                }

              if (analysis_stage && sra_mode == SRA_MODE_EARLY_IPA)
                {
                  tree dest = gimple_call_fndecl (stmt);
                  int flags = gimple_call_flags (stmt);

                  if (dest)
                    {
                      if (DECL_BUILT_IN_CLASS (dest) == BUILT_IN_NORMAL
                          && DECL_FUNCTION_CODE (dest) == BUILT_IN_APPLY_ARGS)
                        encountered_apply_args = true;
                      if (cgraph_get_node (dest)
                          == cgraph_get_node (current_function_decl)
                          && !callsite_has_enough_arguments_p (stmt))
                        encountered_unchangable_recursive_call = true;
                    }

                  if (final_bbs
                      && (flags & (ECF_CONST | ECF_PURE)) == 0)
                    bitmap_set_bit (final_bbs, bb->index);
                }

              if (gimple_call_lhs (stmt))
                {
                  tree *lhs_ptr = gimple_call_lhs_ptr (stmt);
                  if (!analysis_stage
                      || !disqualify_ops_if_throwing_stmt (stmt,
                                                           *lhs_ptr, NULL))
                    {
                      any |= scan_expr (lhs_ptr, &gsi, true, data);
                      if (handle_ssa_defs)
                        any |= handle_ssa_defs (stmt, data);
                    }
                }
              break;

            case GIMPLE_ASM:
              if (analysis_stage)
                {
                  walk_stmt_load_store_addr_ops (stmt, NULL, NULL, NULL,
                                                 asm_visit_addr);
                  if (final_bbs)
                    bitmap_set_bit (final_bbs, bb->index);
                }
              for (i = 0; i < gimple_asm_ninputs (stmt); i++)
                {
                  tree *op = &TREE_VALUE (gimple_asm_input_op (stmt, i));
                  any |= scan_expr (op, &gsi, false, data);
                }
              for (i = 0; i < gimple_asm_noutputs (stmt); i++)
                {
                  tree *op = &TREE_VALUE (gimple_asm_output_op (stmt, i));
                  any |= scan_expr (op, &gsi, true, data);
                }
              break;

            default:
              break;
            }

          if (any)
            {
              ret = true;

              if (!analysis_stage)
                {
                  bb_changed = true;
                  update_stmt (stmt);
                  maybe_clean_eh_stmt (stmt);
                }
            }
          if (deleted)
            bb_changed = true;
          else
            {
              gsi_next (&gsi);
              ret = true;
            }
        }
      if (!analysis_stage && bb_changed && sra_mode == SRA_MODE_EARLY_IPA)
        gimple_purge_dead_eh_edges (bb);
    }

  return ret;
}

/* Helper of qsort for sorting accesses.  The array contains pointers to
   accesses.  An access is considered smaller than another if it has a smaller
   offset or if the offsets are the same but its size is bigger.  */

static int
compare_access_positions (const void *a, const void *b)
{
  const access_p *fp1 = (const access_p *) a;
  const access_p *fp2 = (const access_p *) b;
  const access_p f1 = *fp1;
  const access_p f2 = *fp2;

  if (f1->offset != f2->offset)
    return f1->offset < f2->offset ? -1 : 1;

  if (f1->size == f2->size)
    {
      if (f1->type == f2->type)
        return 0;
      /* Put any non-aggregate type before any aggregate type.  */
      else if (!is_gimple_reg_type (f1->type)
               && is_gimple_reg_type (f2->type))
        return 1;
      else if (is_gimple_reg_type (f1->type)
               && !is_gimple_reg_type (f2->type))
        return -1;
      /* Put any complex or vector type before any other scalar type.  */
      else if (TREE_CODE (f1->type) != COMPLEX_TYPE
               && TREE_CODE (f1->type) != VECTOR_TYPE
               && (TREE_CODE (f2->type) == COMPLEX_TYPE
                   || TREE_CODE (f2->type) == VECTOR_TYPE))
        return 1;
      else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
                || TREE_CODE (f1->type) == VECTOR_TYPE)
               && TREE_CODE (f2->type) != COMPLEX_TYPE
               && TREE_CODE (f2->type) != VECTOR_TYPE)
        return -1;
      /* Put the integral type with the bigger precision first.  */
      else if (INTEGRAL_TYPE_P (f1->type)
               && INTEGRAL_TYPE_P (f2->type))
        return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
      /* Put any integral type with non-full precision last.  */
      else if (INTEGRAL_TYPE_P (f1->type)
               && (TREE_INT_CST_LOW (TYPE_SIZE (f1->type))
                   != TYPE_PRECISION (f1->type)))
        return 1;
      else if (INTEGRAL_TYPE_P (f2->type)
               && (TREE_INT_CST_LOW (TYPE_SIZE (f2->type))
                   != TYPE_PRECISION (f2->type)))
        return -1;
      /* Stabilize the sort.  */
      return TYPE_UID (f1->type) - TYPE_UID (f2->type);
    }

  /* We want the bigger accesses first, thus the opposite operator in the next
     line: */
  return f1->size > f2->size ? -1 : 1;
}


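/* To illustrate the ordering (an example only, not from the original
   sources): accesses <offset 0, size 64>, <offset 0, size 32> and
   <offset 32, size 32> sort as

     <0, 64>  <0, 32>  <32, 32>

   i.e. by increasing offset, with the larger (enclosing) access preceding
   smaller ones at the same offset, and with scalar types put before
   aggregates among accesses of equal offset and size so that a scalar ends
   up as the group representative.  */
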
/* Append a name of the declaration to the name obstack.  A helper function for
   make_fancy_name.  */

static void
make_fancy_decl_name (tree decl)
{
  char buffer[32];

  tree name = DECL_NAME (decl);
  if (name)
    obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
                  IDENTIFIER_LENGTH (name));
  else
    {
      sprintf (buffer, "D%u", DECL_UID (decl));
      obstack_grow (&name_obstack, buffer, strlen (buffer));
    }
}

/* Helper for make_fancy_name.  */

static void
make_fancy_name_1 (tree expr)
{
  char buffer[32];
  tree index;

  if (DECL_P (expr))
    {
      make_fancy_decl_name (expr);
      return;
    }

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      obstack_1grow (&name_obstack, '$');
      make_fancy_decl_name (TREE_OPERAND (expr, 1));
      break;

    case ARRAY_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      obstack_1grow (&name_obstack, '$');
      /* Arrays with only one element may not have a constant as their
         index.  */
      index = TREE_OPERAND (expr, 1);
      if (TREE_CODE (index) != INTEGER_CST)
        break;
      sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
      obstack_grow (&name_obstack, buffer, strlen (buffer));

      break;

    case BIT_FIELD_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      gcc_unreachable ();       /* we treat these as scalars.  */
      break;
    default:
      break;
    }
}

/* Create a human readable name for the replacement variable of the access
   expression EXPR.  */

static char *
make_fancy_name (tree expr)
{
  make_fancy_name_1 (expr);
  obstack_1grow (&name_obstack, '\0');
  return XOBFINISH (&name_obstack, char *);
}

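/* For instance (an illustration, not part of the original sources), for the
   expression s.in.c the obstack ends up holding "s$in$c", and for arr[3].f
   it holds "arr$3$f"; such strings are later used when naming the scalar
   replacements, which makes the dumps easier to read.  */
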
/* Helper function for build_ref_for_offset.  */

static bool
build_ref_for_offset_1 (tree *res, tree type, HOST_WIDE_INT offset,
                        tree exp_type)
{
  while (1)
    {
      tree fld;
      tree tr_size, index, minidx;
      HOST_WIDE_INT el_size;

      if (offset == 0 && exp_type
          && types_compatible_p (exp_type, type))
        return true;

      switch (TREE_CODE (type))
        {
        case UNION_TYPE:
        case QUAL_UNION_TYPE:
        case RECORD_TYPE:
          for (fld = TYPE_FIELDS (type); fld; fld = TREE_CHAIN (fld))
            {
              HOST_WIDE_INT pos, size;
              tree expr, *expr_ptr;

              if (TREE_CODE (fld) != FIELD_DECL)
                continue;

              pos = int_bit_position (fld);
              gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
              tr_size = DECL_SIZE (fld);
              if (!tr_size || !host_integerp (tr_size, 1))
                continue;
              size = tree_low_cst (tr_size, 1);
              if (size == 0)
                {
                  if (pos != offset)
                    continue;
                }
              else if (pos > offset || (pos + size) <= offset)
                continue;

              if (res)
                {
                  expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
                                 NULL_TREE);
                  expr_ptr = &expr;
                }
              else
                expr_ptr = NULL;
              if (build_ref_for_offset_1 (expr_ptr, TREE_TYPE (fld),
                                          offset - pos, exp_type))
                {
                  if (res)
                    *res = expr;
                  return true;
                }
            }
          return false;

        case ARRAY_TYPE:
          tr_size = TYPE_SIZE (TREE_TYPE (type));
          if (!tr_size || !host_integerp (tr_size, 1))
            return false;
          el_size = tree_low_cst (tr_size, 1);

          minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
          if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
            return false;
          if (res)
            {
              index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
              if (!integer_zerop (minidx))
                index = int_const_binop (PLUS_EXPR, index, minidx, 0);
              *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
                             NULL_TREE, NULL_TREE);
            }
          offset = offset % el_size;
          type = TREE_TYPE (type);
          break;

        default:
          if (offset != 0)
            return false;

          if (exp_type)
            return false;
          else
            return true;
        }
    }
}

/* Construct an expression that would reference a part of aggregate *EXPR of
   type TYPE at the given OFFSET of the type EXP_TYPE.  If EXPR is NULL, the
   function only determines whether it can build such a reference without
   actually doing it, otherwise, the tree it points to is unshared first and
   then used as a base for further sub-references.

   FIXME: Eventually this should be replaced with
   maybe_fold_offset_to_reference() from tree-ssa-ccp.c but that requires a
   minor rewrite of fold_stmt.
 */

bool
build_ref_for_offset (tree *expr, tree type, HOST_WIDE_INT offset,
                      tree exp_type, bool allow_ptr)
{
  location_t loc = expr ? EXPR_LOCATION (*expr) : UNKNOWN_LOCATION;

  if (expr)
    *expr = unshare_expr (*expr);

  if (allow_ptr && POINTER_TYPE_P (type))
    {
      type = TREE_TYPE (type);
      if (expr)
        *expr = fold_build1_loc (loc, INDIRECT_REF, type, *expr);
    }

  return build_ref_for_offset_1 (expr, type, offset, exp_type);
}

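/* A small usage sketch (illustration only, not part of the original
   sources): with

     struct In { short b; short c; };
     struct S { int a; struct In in; };

   and assuming a 32-bit int and a 16-bit short, calling build_ref_for_offset
   on an expression x of type struct S with OFFSET 48 and EXP_TYPE short
   descends through the COMPONENT_REFs and builds the reference x.in.c: bit
   position 32 selects the field in, and the remaining 16 bits select c.  */
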
/* Return true iff TYPE is a stdarg va_list type.  */

static inline bool
is_va_list_type (tree type)
{
  return TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (va_list_type_node);
}

/* The very first phase of intraprocedural SRA.  It marks in candidate_bitmap
   those variables whose type is suitable for scalarization.  */

static bool
find_var_candidates (void)
{
  tree var, type;
  referenced_var_iterator rvi;
  bool ret = false;

  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      if (TREE_CODE (var) != VAR_DECL && TREE_CODE (var) != PARM_DECL)
        continue;
      type = TREE_TYPE (var);

      if (!AGGREGATE_TYPE_P (type)
          || needs_to_live_in_memory (var)
          || TREE_THIS_VOLATILE (var)
          || !COMPLETE_TYPE_P (type)
          || !host_integerp (TYPE_SIZE (type), 1)
          || tree_low_cst (TYPE_SIZE (type), 1) == 0
          || type_internals_preclude_sra_p (type)
          /* Fix for PR 41089.  tree-stdarg.c needs to have va_lists intact but
             we also want to schedule it rather late.  Thus we ignore it in
             the early pass.  */
          || (sra_mode == SRA_MODE_EARLY_INTRA
              && is_va_list_type (type)))
        continue;

      bitmap_set_bit (candidate_bitmap, DECL_UID (var));

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
          print_generic_expr (dump_file, var, 0);
          fprintf (dump_file, "\n");
        }
      ret = true;
    }

  return ret;
}

/* Sort all accesses for the given variable, check for partial overlaps and
   return NULL if there are any.  If there are none, pick a representative for
   each combination of offset and size and create a linked list out of them.
   Return the pointer to the first representative and make sure it is the first
   one in the vector of accesses.  */

static struct access *
sort_and_splice_var_accesses (tree var)
{
  int i, j, access_count;
  struct access *res, **prev_acc_ptr = &res;
  VEC (access_p, heap) *access_vec;
  bool first = true;
  HOST_WIDE_INT low = -1, high = 0;

  access_vec = get_base_access_vector (var);
  if (!access_vec)
    return NULL;
  access_count = VEC_length (access_p, access_vec);

  /* Sort by <OFFSET, SIZE>.  */
  qsort (VEC_address (access_p, access_vec), access_count, sizeof (access_p),
         compare_access_positions);

  i = 0;
  while (i < access_count)
    {
      struct access *access = VEC_index (access_p, access_vec, i);
      bool grp_write = access->write;
      bool grp_read = !access->write;
      bool grp_assignment_read = access->grp_assignment_read;
      bool multiple_reads = false;
      bool total_scalarization = access->total_scalarization;
      bool grp_partial_lhs = access->grp_partial_lhs;
      bool first_scalar = is_gimple_reg_type (access->type);
      bool unscalarizable_region = access->grp_unscalarizable_region;

      if (first || access->offset >= high)
        {
          first = false;
          low = access->offset;
          high = access->offset + access->size;
        }
      else if (access->offset > low && access->offset + access->size > high)
        return NULL;
      else
        gcc_assert (access->offset >= low
                    && access->offset + access->size <= high);

      j = i + 1;
      while (j < access_count)
        {
          struct access *ac2 = VEC_index (access_p, access_vec, j);
          if (ac2->offset != access->offset || ac2->size != access->size)
            break;
          if (ac2->write)
            grp_write = true;
          else
            {
              if (grp_read)
                multiple_reads = true;
              else
                grp_read = true;
            }
          grp_assignment_read |= ac2->grp_assignment_read;
          grp_partial_lhs |= ac2->grp_partial_lhs;
          unscalarizable_region |= ac2->grp_unscalarizable_region;
          total_scalarization |= ac2->total_scalarization;
          relink_to_new_repr (access, ac2);

          /* If there are both aggregate-type and scalar-type accesses with
             this combination of size and offset, the comparison function
             should have put the scalars first.  */
          gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
          ac2->group_representative = access;
          j++;
        }

      i = j;

      access->group_representative = access;
      access->grp_write = grp_write;
      access->grp_read = grp_read;
      access->grp_assignment_read = grp_assignment_read;
      access->grp_hint = multiple_reads || total_scalarization;
      access->grp_partial_lhs = grp_partial_lhs;
      access->grp_unscalarizable_region = unscalarizable_region;
      if (access->first_link)
        add_access_to_work_queue (access);

      *prev_acc_ptr = access;
      prev_acc_ptr = &access->next_grp;
    }

  gcc_assert (res == VEC_index (access_p, access_vec, 0));
1648
  return res;
1649
}
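
/* Editor's illustration, not part of GCC; a hypothetical partial overlap
   that makes the function above return NULL.  */
#if 0
struct a3 { char c[3]; };
union u
{
  struct a3 a;				/* as a whole: bits 0..23.  */
  struct { short pad; short s; } b;	/* member s: bits 16..31.  */
};

struct a3 g;

void
example (union u x)
{
  g = x.a;	/* aggregate access covering bits 0..23.  */
  x.b.s = 1;	/* access covering bits 16..31; it overlaps the previous one
		   without being contained in it, so the whole union is
		   disqualified from scalarization.  */
}
#endif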

/* Create a variable for the given ACCESS which determines the type, name and a
   few other properties.  Return the variable declaration and store it also to
   ACCESS->replacement_decl.  */

static tree
create_access_replacement (struct access *access, bool rename)
{
  tree repl;

  repl = create_tmp_var (access->type, "SR");
  get_var_ann (repl);
  add_referenced_var (repl);
  if (rename)
    mark_sym_for_renaming (repl);

  if (!access->grp_partial_lhs
      && (TREE_CODE (access->type) == COMPLEX_TYPE
          || TREE_CODE (access->type) == VECTOR_TYPE))
    DECL_GIMPLE_REG_P (repl) = 1;

  DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
  DECL_ARTIFICIAL (repl) = 1;
  DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);

  if (DECL_NAME (access->base)
      && !DECL_IGNORED_P (access->base)
      && !DECL_ARTIFICIAL (access->base))
    {
      char *pretty_name = make_fancy_name (access->expr);

      DECL_NAME (repl) = get_identifier (pretty_name);
      obstack_free (&name_obstack, pretty_name);

      SET_DECL_DEBUG_EXPR (repl, access->expr);
      DECL_DEBUG_EXPR_IS_FROM (repl) = 1;
      TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
    }
  else
    TREE_NO_WARNING (repl) = 1;

  if (dump_file)
    {
      fprintf (dump_file, "Created a replacement for ");
      print_generic_expr (dump_file, access->base, 0);
      fprintf (dump_file, " offset: %u, size: %u: ",
               (unsigned) access->offset, (unsigned) access->size);
      print_generic_expr (dump_file, repl, 0);
      fprintf (dump_file, "\n");
    }
  sra_stats.replacements++;

  return repl;
}

/* Return ACCESS scalar replacement, create it if it does not exist yet.  */

static inline tree
get_access_replacement (struct access *access)
{
  gcc_assert (access->grp_to_be_replaced);

  if (!access->replacement_decl)
    access->replacement_decl = create_access_replacement (access, true);
  return access->replacement_decl;
}

/* Return ACCESS scalar replacement, create it if it does not exist yet but do
   not mark it for renaming.  */

static inline tree
get_unrenamed_access_replacement (struct access *access)
{
  gcc_assert (!access->grp_to_be_replaced);

  if (!access->replacement_decl)
    access->replacement_decl = create_access_replacement (access, false);
  return access->replacement_decl;
}

/* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
   linked list along the way.  Stop when *ACCESS is NULL or the access it
   points to is not "within" the root.  Return false iff some accesses
   partially overlap.  */

static bool
build_access_subtree (struct access **access)
{
  struct access *root = *access, *last_child = NULL;
  HOST_WIDE_INT limit = root->offset + root->size;

  *access = (*access)->next_grp;
  while (*access && (*access)->offset + (*access)->size <= limit)
    {
      if (!last_child)
        root->first_child = *access;
      else
        last_child->next_sibling = *access;
      last_child = *access;

      if (!build_access_subtree (access))
        return false;
    }

  if (*access && (*access)->offset < limit)
    return false;

  return true;
}

/* Build a tree of access representatives, ACCESS is the pointer to the first
   one, others are linked in a list by the next_grp field.  Return false iff
   some accesses partially overlap.  */

static bool
build_access_trees (struct access *access)
{
  while (access)
    {
      struct access *root = access;

      if (!build_access_subtree (&access))
        return false;
      root->next_grp = access;
    }
  return true;
}
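
/* Editor's illustration, not part of GCC; the access tree built for a
   hypothetical nested aggregate.  */
#if 0
struct inner { int i; int j; };
struct outer { struct inner in; int k; };

struct inner g;

int
example (struct outer o)
{
  g = o.in;		/* access at offset 0, size 64.  */
  return o.in.j + o.k;	/* accesses at offsets 32 and 64, size 32 each.  */
}

/* The representatives above form the tree

     o.in   (offset  0, size 64)
       o.in.j (offset 32, size 32)
     o.k    (offset 64, size 32)

   because o.in.j lies entirely within o.in.  */
#endif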

/* Return true if EXPR contains some ARRAY_REFs into a variable-bounded
   array.  */

static bool
expr_with_var_bounded_array_refs_p (tree expr)
{
  while (handled_component_p (expr))
    {
      if (TREE_CODE (expr) == ARRAY_REF
          && !host_integerp (array_ref_low_bound (expr), 0))
        return true;
      expr = TREE_OPERAND (expr, 0);
    }
  return false;
}

enum mark_read_status { SRA_MR_NOT_READ, SRA_MR_READ, SRA_MR_ASSIGN_READ };

/* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
   they seem beneficial and ALLOW_REPLACEMENTS allows it.  Also set all sorts
   of access flags appropriately along the way, notably always set grp_read and
   grp_assignment_read according to MARK_READ and grp_write when MARK_WRITE is
   true.  */

static bool
analyze_access_subtree (struct access *root, bool allow_replacements,
                        enum mark_read_status mark_read, bool mark_write)
{
  struct access *child;
  HOST_WIDE_INT limit = root->offset + root->size;
  HOST_WIDE_INT covered_to = root->offset;
  bool scalar = is_gimple_reg_type (root->type);
  bool hole = false, sth_created = false;
  bool direct_read = root->grp_read;

  if (mark_read == SRA_MR_ASSIGN_READ)
    {
      root->grp_read = 1;
      root->grp_assignment_read = 1;
    }
  if (mark_read == SRA_MR_READ)
    root->grp_read = 1;
  else if (root->grp_assignment_read)
    mark_read = SRA_MR_ASSIGN_READ;
  else if (root->grp_read)
    mark_read = SRA_MR_READ;

  if (mark_write)
    root->grp_write = true;
  else if (root->grp_write)
    mark_write = true;

  if (root->grp_unscalarizable_region)
    allow_replacements = false;

  if (allow_replacements && expr_with_var_bounded_array_refs_p (root->expr))
    allow_replacements = false;

  for (child = root->first_child; child; child = child->next_sibling)
    {
      if (!hole && child->offset < covered_to)
        hole = true;
      else
        covered_to += child->size;

      sth_created |= analyze_access_subtree (child,
                                             allow_replacements && !scalar,
                                             mark_read, mark_write);

      root->grp_unscalarized_data |= child->grp_unscalarized_data;
      hole |= !child->grp_covered;
    }

  if (allow_replacements && scalar && !root->first_child
      && (root->grp_hint
          || (root->grp_write && (direct_read || root->grp_assignment_read)))
      /* We must not ICE later on when trying to build an access to the
         original data within the aggregate even when it is impossible to do in
         a defined way like in the PR 42703 testcase.  Therefore we check
         pre-emptively here that we will be able to do that.  */
      && build_ref_for_offset (NULL, TREE_TYPE (root->base), root->offset,
                               root->type, false))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Marking ");
          print_generic_expr (dump_file, root->base, 0);
          fprintf (dump_file, " offset: %u, size: %u: ",
                   (unsigned) root->offset, (unsigned) root->size);
          fprintf (dump_file, " to be replaced.\n");
        }

      root->grp_to_be_replaced = 1;
      sth_created = true;
      hole = false;
    }
  else if (covered_to < limit)
    hole = true;

  if (sth_created && !hole)
    {
      root->grp_covered = 1;
      return true;
    }
  if (root->grp_write || TREE_CODE (root->base) == PARM_DECL)
    root->grp_unscalarized_data = 1; /* not covered and written to */
  if (sth_created)
    return true;
  return false;
}

/* Analyze all access trees linked by next_grp by the means of
   analyze_access_subtree.  */
static bool
analyze_access_trees (struct access *access)
{
  bool ret = false;

  while (access)
    {
      if (analyze_access_subtree (access, true, SRA_MR_NOT_READ, false))
        ret = true;
      access = access->next_grp;
    }

  return ret;
}

/* Return true iff a potential new child of LACC at offset OFFSET and with size
   SIZE would conflict with an already existing one.  If exactly such a child
   already exists in LACC, store a pointer to it in EXACT_MATCH.  */

static bool
child_would_conflict_in_lacc (struct access *lacc, HOST_WIDE_INT norm_offset,
                              HOST_WIDE_INT size, struct access **exact_match)
{
  struct access *child;

  for (child = lacc->first_child; child; child = child->next_sibling)
    {
      if (child->offset == norm_offset && child->size == size)
        {
          *exact_match = child;
          return true;
        }

      if (child->offset < norm_offset + size
          && child->offset + child->size > norm_offset)
        return true;
    }

  return false;
}

/* Create a new child access of PARENT, with all properties just like MODEL
   except for its offset and with its grp_write true and grp_read false.
   Return the new access or NULL if it cannot be created.  Note that this
   access is created long after all splicing and sorting, it's not located in
   any access vector and is automatically a representative of its group.  */

static struct access *
create_artificial_child_access (struct access *parent, struct access *model,
                                HOST_WIDE_INT new_offset)
{
  struct access *access;
  struct access **child;
  tree expr = parent->base;

  gcc_assert (!model->grp_unscalarizable_region);

  if (!build_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
                             model->type, false))
    return NULL;

  access = (struct access *) pool_alloc (access_pool);
  memset (access, 0, sizeof (struct access));
  access->base = parent->base;
  access->expr = expr;
  access->offset = new_offset;
  access->size = model->size;
  access->type = model->type;
  access->grp_write = true;
  access->grp_read = false;

  child = &parent->first_child;
  while (*child && (*child)->offset < new_offset)
    child = &(*child)->next_sibling;

  access->next_sibling = *child;
  *child = access;

  return access;
}


/* Propagate all subaccesses of RACC across an assignment link to LACC. Return
   true if any new subaccess was created.  Additionally, if RACC is a scalar
   access but LACC is not, change the type of the latter, if possible.  */

static bool
propagate_subaccesses_across_link (struct access *lacc, struct access *racc)
{
  struct access *rchild;
  HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
  bool ret = false;

  if (is_gimple_reg_type (lacc->type)
      || lacc->grp_unscalarizable_region
      || racc->grp_unscalarizable_region)
    return false;

  if (!lacc->first_child && !racc->first_child
      && is_gimple_reg_type (racc->type))
    {
      tree t = lacc->base;

      if (build_ref_for_offset (&t, TREE_TYPE (t), lacc->offset, racc->type,
                                false))
        {
          lacc->expr = t;
          lacc->type = racc->type;
        }
      return false;
    }

  for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
    {
      struct access *new_acc = NULL;
      HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;

      if (rchild->grp_unscalarizable_region)
        continue;

      if (child_would_conflict_in_lacc (lacc, norm_offset, rchild->size,
                                        &new_acc))
        {
          if (new_acc)
            {
              rchild->grp_hint = 1;
              new_acc->grp_hint |= new_acc->grp_read;
              if (rchild->first_child)
                ret |= propagate_subaccesses_across_link (new_acc, rchild);
            }
          continue;
        }

      /* If (a part of) a union field is on the RHS of an assignment, it can
         have sub-accesses which do not make sense on the LHS (PR 40351).
         Check that this is not the case.  */
      if (!build_ref_for_offset (NULL, TREE_TYPE (lacc->base), norm_offset,
                                 rchild->type, false))
        continue;

      rchild->grp_hint = 1;
      new_acc = create_artificial_child_access (lacc, rchild, norm_offset);
      if (new_acc)
        {
          ret = true;
          if (racc->first_child)
            propagate_subaccesses_across_link (new_acc, rchild);
        }
    }

  return ret;
}
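
/* Editor's illustration, not part of GCC; a hypothetical assignment link
   across which a subaccess is propagated.  */
#if 0
struct pair { int a; int b; };

int
example (struct pair x)
{
  struct pair y;
  y = x;	/* assign link with lacc = y, racc = x.  */
  return x.a;	/* x.a is a known subaccess of x; propagation creates an
		   artificial child access of y at the same offset so that
		   the copy can later be performed as "y$a = x$a".  */
}
#endif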

/* Propagate all subaccesses across assignment links.  */

static void
propagate_all_subaccesses (void)
{
  while (work_queue_head)
    {
      struct access *racc = pop_access_from_work_queue ();
      struct assign_link *link;

      gcc_assert (racc->first_link);

      for (link = racc->first_link; link; link = link->next)
        {
          struct access *lacc = link->lacc;

          if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
            continue;
          lacc = lacc->group_representative;
          if (propagate_subaccesses_across_link (lacc, racc)
              && lacc->first_link)
            add_access_to_work_queue (lacc);
        }
    }
}

/* Go through all accesses collected throughout the (intraprocedural) analysis
   stage, exclude overlapping ones, identify representatives and build trees
   out of them, making decisions about scalarization on the way.  Return true
   iff there are any to-be-scalarized variables after this stage. */

static bool
analyze_all_variable_accesses (void)
{
  int res = 0;
  bitmap tmp = BITMAP_ALLOC (NULL);
  bitmap_iterator bi;
  unsigned i, max_total_scalarization_size;

  max_total_scalarization_size = UNITS_PER_WORD * BITS_PER_UNIT
    * MOVE_RATIO (optimize_function_for_speed_p (cfun));

  EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
    if (bitmap_bit_p (should_scalarize_away_bitmap, i)
        && !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
      {
        tree var = referenced_var (i);

        if (TREE_CODE (var) == VAR_DECL
            && ((unsigned) tree_low_cst (TYPE_SIZE (TREE_TYPE (var)), 1)
                <= max_total_scalarization_size)
            && type_consists_of_records_p (TREE_TYPE (var)))
          {
            completely_scalarize_record (var, var, 0);
            if (dump_file && (dump_flags & TDF_DETAILS))
              {
                fprintf (dump_file, "Will attempt to totally scalarize ");
                print_generic_expr (dump_file, var, 0);
                fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
              }
          }
      }

  bitmap_copy (tmp, candidate_bitmap);
  EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
    {
      tree var = referenced_var (i);
      struct access *access;

      access = sort_and_splice_var_accesses (var);
      if (!access || !build_access_trees (access))
        disqualify_candidate (var,
                              "No or inhibitingly overlapping accesses.");
    }

  propagate_all_subaccesses ();

  bitmap_copy (tmp, candidate_bitmap);
  EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
    {
      tree var = referenced_var (i);
      struct access *access = get_first_repr_for_decl (var);

      if (analyze_access_trees (access))
        {
          res++;
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "\nAccess trees for ");
              print_generic_expr (dump_file, var, 0);
              fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
              dump_access_tree (dump_file, access);
              fprintf (dump_file, "\n");
            }
        }
      else
        disqualify_candidate (var, "No scalar replacements to be created.");
    }

  BITMAP_FREE (tmp);

  if (res)
    {
      statistics_counter_event (cfun, "Scalarized aggregates", res);
      return true;
    }
  else
    return false;
}

/* Return true iff a reference statement into aggregate AGG can be built for
   every single to-be-replaced access that is a child of ACCESS, its sibling
   or a child of its sibling.  TOP_OFFSET is the offset of the processed
   access subtree, which has to be subtracted from the offset of each
   access.  */

static bool
ref_expr_for_all_replacements_p (struct access *access, tree agg,
                                 HOST_WIDE_INT top_offset)
{
  do
    {
      if (access->grp_to_be_replaced
          && !build_ref_for_offset (NULL, TREE_TYPE (agg),
                                    access->offset - top_offset,
                                    access->type, false))
        return false;

      if (access->first_child
          && !ref_expr_for_all_replacements_p (access->first_child, agg,
                                               top_offset))
        return false;

      access = access->next_sibling;
    }
  while (access);

  return true;
}

/* Generate statements copying scalar replacements of accesses within a subtree
   into or out of AGG.  ACCESS is the first child of the root of the subtree to
   be processed.  AGG is an aggregate type expression (can be a declaration but
   does not have to be, it can for example also be an indirect_ref).
   TOP_OFFSET is the offset of the processed subtree which has to be subtracted
   from offsets of individual accesses to get corresponding offsets for AGG.
   If CHUNK_SIZE is non-zero, copy only replacements in the interval
   <start_offset, start_offset + chunk_size>, otherwise copy all.  GSI is a
   statement iterator used to place the new statements.  WRITE should be true
   when the statements should write from AGG to the replacement and false if
   vice versa.  If INSERT_AFTER is true, new statements will be added after the
   current statement in GSI, otherwise they will be added before it.  */

static void
generate_subtree_copies (struct access *access, tree agg,
                         HOST_WIDE_INT top_offset,
                         HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
                         gimple_stmt_iterator *gsi, bool write,
                         bool insert_after)
{
  do
    {
      tree expr = agg;

      if (chunk_size && access->offset >= start_offset + chunk_size)
        return;

      if (access->grp_to_be_replaced
          && (chunk_size == 0
              || access->offset + access->size > start_offset))
        {
          tree repl = get_access_replacement (access);
          bool ref_found;
          gimple stmt;

          ref_found = build_ref_for_offset (&expr, TREE_TYPE (agg),
                                            access->offset - top_offset,
                                            access->type, false);
          gcc_assert (ref_found);

          if (write)
            {
              if (access->grp_partial_lhs)
                expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
                                                 !insert_after,
                                                 insert_after ? GSI_NEW_STMT
                                                 : GSI_SAME_STMT);
              stmt = gimple_build_assign (repl, expr);
            }
          else
            {
              TREE_NO_WARNING (repl) = 1;
              if (access->grp_partial_lhs)
                repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
                                                 !insert_after,
                                                 insert_after ? GSI_NEW_STMT
                                                 : GSI_SAME_STMT);
              stmt = gimple_build_assign (expr, repl);
            }

          if (insert_after)
            gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
          else
            gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
          update_stmt (stmt);
          sra_stats.subtree_copies++;
        }

      if (access->first_child)
        generate_subtree_copies (access->first_child, agg, top_offset,
                                 start_offset, chunk_size, gsi,
                                 write, insert_after);

      access = access->next_sibling;
    }
  while (access);
}
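
/* Editor's illustration, not part of GCC: the shape of the statements the
   function above emits.  For a hypothetical aggregate "p" of type
   "struct s { int a; int b; }" whose fields have replacements p$a and p$b
   (the "$" naming follows the dump files), WRITE being true produces

     p$a = p.a;
     p$b = p.b;

   and WRITE being false produces the reverse stores

     p.a = p$a;
     p.b = p$b;  */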

/* Assign zero to all scalar replacements in an access subtree.  ACCESS is
   the root of the subtree to be processed.  GSI is the statement iterator used
   for inserting statements which are added after the current statement if
   INSERT_AFTER is true or before it otherwise.  */

static void
init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
                        bool insert_after)
{
  struct access *child;

  if (access->grp_to_be_replaced)
    {
      gimple stmt;

      stmt = gimple_build_assign (get_access_replacement (access),
                                  fold_convert (access->type,
                                                integer_zero_node));
      if (insert_after)
        gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
      else
        gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
      update_stmt (stmt);
    }

  for (child = access->first_child; child; child = child->next_sibling)
    init_subtree_with_zero (child, gsi, insert_after);
}

/* Search for an access representative for the given expression EXPR and
   return it or NULL if it cannot be found.  */

static struct access *
get_access_for_expr (tree expr)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base;

  /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
     a different size than the size of its argument and we need the latter
     one.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
    expr = TREE_OPERAND (expr, 0);

  base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
  if (max_size == -1 || !DECL_P (base))
    return NULL;

  if (!bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
    return NULL;

  return get_var_base_offset_size_access (base, offset, max_size);
}

/* Callback for scan_function.  Replace the expression EXPR with a scalar
   replacement if there is one and generate other statements to do type
   conversion or subtree copying if necessary.  GSI is used to place newly
   created statements, WRITE is true if the expression is being written to (it
   is on a LHS of a statement or output in an assembly statement).  */

static bool
sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write,
                 void *data ATTRIBUTE_UNUSED)
{
  struct access *access;
  tree type, bfr;

  if (TREE_CODE (*expr) == BIT_FIELD_REF)
    {
      bfr = *expr;
      expr = &TREE_OPERAND (*expr, 0);
    }
  else
    bfr = NULL_TREE;

  if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
    expr = &TREE_OPERAND (*expr, 0);
  access = get_access_for_expr (*expr);
  if (!access)
    return false;
  type = TREE_TYPE (*expr);

  if (access->grp_to_be_replaced)
    {
      tree repl = get_access_replacement (access);
      /* If we replace a non-register typed access simply use the original
         access expression to extract the scalar component afterwards.
         This happens if scalarizing a function return value or parameter
         like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
         gcc.c-torture/compile/20011217-1.c.

         We also want to use this when accessing a complex or vector which can
         be accessed as a different type too, potentially creating a need for
         type conversion (see PR42196) and when scalarized unions are involved
         in assembler statements (see PR42398).  */
      if (!useless_type_conversion_p (type, access->type))
        {
          tree ref = access->base;
          bool ok;

          ok = build_ref_for_offset (&ref, TREE_TYPE (ref),
                                     access->offset, access->type, false);
          gcc_assert (ok);

          if (write)
            {
              gimple stmt;

              if (access->grp_partial_lhs)
                ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
                                                false, GSI_NEW_STMT);
              stmt = gimple_build_assign (repl, ref);
              gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
            }
          else
            {
              gimple stmt;

              if (access->grp_partial_lhs)
                repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
                                                 true, GSI_SAME_STMT);
              stmt = gimple_build_assign (ref, repl);
              gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
            }
        }
      else
        *expr = repl;
      sra_stats.exprs++;
    }

  if (access->first_child)
    {
      HOST_WIDE_INT start_offset, chunk_size;
      if (bfr
          && host_integerp (TREE_OPERAND (bfr, 1), 1)
          && host_integerp (TREE_OPERAND (bfr, 2), 1))
        {
          chunk_size = tree_low_cst (TREE_OPERAND (bfr, 1), 1);
          start_offset = access->offset
            + tree_low_cst (TREE_OPERAND (bfr, 2), 1);
        }
      else
        start_offset = chunk_size = 0;

      generate_subtree_copies (access->first_child, access->base, 0,
                               start_offset, chunk_size, gsi, write, write);
    }
  return true;
}
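
/* Editor's illustration, not part of GCC; the basic rewrite performed by
   the callback above, with hypothetical names.  */
#if 0
struct s { int x; int y; };

int
example (struct s p)
{
  return p.x;	/* with p scalarized, the load "p.x" is simply replaced by
		   its scalar replacement (printed as "p$x" in dumps); when
		   the requested type differs from the replacement's type, a
		   load through the original reference is emitted next to the
		   statement instead.  */
}
#endif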

/* Where scalar replacements of the RHS have been written to when a replacement
   of the LHS of an assignment cannot be directly loaded from a replacement of
   the RHS.  */
enum unscalarized_data_handling { SRA_UDH_NONE,  /* Nothing done so far. */
                                  SRA_UDH_RIGHT, /* Data flushed to the RHS. */
                                  SRA_UDH_LEFT }; /* Data flushed to the LHS. */

/* Store all replacements in the access tree rooted in TOP_RACC either to their
   base aggregate if there are unscalarized data or directly to LHS
   otherwise.  */

static enum unscalarized_data_handling
handle_unscalarized_data_in_subtree (struct access *top_racc, tree lhs,
                                     gimple_stmt_iterator *gsi)
{
  if (top_racc->grp_unscalarized_data)
    {
      generate_subtree_copies (top_racc->first_child, top_racc->base, 0, 0, 0,
                               gsi, false, false);
      return SRA_UDH_RIGHT;
    }
  else
    {
      generate_subtree_copies (top_racc->first_child, lhs, top_racc->offset,
                               0, 0, gsi, false, false);
      return SRA_UDH_LEFT;
    }
}


/* Try to generate statements to load all sub-replacements in an access
   (sub)tree (LACC is the first child) from scalar replacements in the TOP_RACC
   (sub)tree.  If that is not possible, refresh the TOP_RACC base aggregate and
   load the accesses from it.  LEFT_OFFSET is the offset of the left whole
   subtree being copied, RIGHT_OFFSET is the same thing for the right subtree.
   NEW_GSI is the statement iterator used for insertions after the original
   assignment, OLD_GSI is used to insert statements before the assignment.
   *REFRESHED keeps track of whether we have needed to refresh replacements of
   the LHS and from which side of the assignment this takes place.  */

static void
load_assign_lhs_subreplacements (struct access *lacc, struct access *top_racc,
                                 HOST_WIDE_INT left_offset,
                                 HOST_WIDE_INT right_offset,
                                 gimple_stmt_iterator *old_gsi,
                                 gimple_stmt_iterator *new_gsi,
                                 enum unscalarized_data_handling *refreshed,
                                 tree lhs)
{
  location_t loc = EXPR_LOCATION (lacc->expr);
  do
    {
      if (lacc->grp_to_be_replaced)
        {
          struct access *racc;
          HOST_WIDE_INT offset = lacc->offset - left_offset + right_offset;
          gimple stmt;
          tree rhs;

          racc = find_access_in_subtree (top_racc, offset, lacc->size);
          if (racc && racc->grp_to_be_replaced)
            {
              rhs = get_access_replacement (racc);
              if (!useless_type_conversion_p (lacc->type, racc->type))
                rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, lacc->type, rhs);
            }
          else
            {
              /* No suitable access on the right hand side, need to load from
                 the aggregate.  See if we have to update it first... */
              if (*refreshed == SRA_UDH_NONE)
                *refreshed = handle_unscalarized_data_in_subtree (top_racc,
                                                                  lhs, old_gsi);

              if (*refreshed == SRA_UDH_LEFT)
                {
                  bool repl_found;

                  rhs = lacc->base;
                  repl_found = build_ref_for_offset (&rhs, TREE_TYPE (rhs),
                                                     lacc->offset, lacc->type,
                                                     false);
                  gcc_assert (repl_found);
                }
              else
                {
                  bool repl_found;

                  rhs = top_racc->base;
                  repl_found = build_ref_for_offset (&rhs,
                                                     TREE_TYPE (top_racc->base),
                                                     offset, lacc->type, false);
                  gcc_assert (repl_found);
                }
            }

          stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
          gsi_insert_after (new_gsi, stmt, GSI_NEW_STMT);
          update_stmt (stmt);
          sra_stats.subreplacements++;
        }
      else if (*refreshed == SRA_UDH_NONE
               && lacc->grp_read && !lacc->grp_covered)
        *refreshed = handle_unscalarized_data_in_subtree (top_racc, lhs,
                                                          old_gsi);

      if (lacc->first_child)
        load_assign_lhs_subreplacements (lacc->first_child, top_racc,
                                         left_offset, right_offset,
                                         old_gsi, new_gsi, refreshed, lhs);
      lacc = lacc->next_sibling;
    }
  while (lacc);
}

/* Modify assignments with a CONSTRUCTOR on their RHS.  STMT contains a pointer
   to the assignment and GSI is the statement iterator pointing at it.  Returns
   the same values as sra_modify_assign.  */

static enum scan_assign_result
sra_modify_constructor_assign (gimple *stmt, gimple_stmt_iterator *gsi)
{
  tree lhs = gimple_assign_lhs (*stmt);
  struct access *acc;

  acc = get_access_for_expr (lhs);
  if (!acc)
    return SRA_SA_NONE;

  if (VEC_length (constructor_elt,
                  CONSTRUCTOR_ELTS (gimple_assign_rhs1 (*stmt))) > 0)
    {
      /* I have never seen this code path trigger but if it can happen the
         following should handle it gracefully.  */
      if (access_has_children_p (acc))
        generate_subtree_copies (acc->first_child, acc->base, 0, 0, 0, gsi,
                                 true, true);
      return SRA_SA_PROCESSED;
    }

  if (acc->grp_covered)
    {
      init_subtree_with_zero (acc, gsi, false);
      unlink_stmt_vdef (*stmt);
      gsi_remove (gsi, true);
      return SRA_SA_REMOVED;
    }
  else
    {
      init_subtree_with_zero (acc, gsi, true);
      return SRA_SA_PROCESSED;
    }
}
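
/* Editor's illustration, not part of GCC; a hypothetical zero-initializing
   constructor handled by the function above.  */
#if 0
struct pair { int a; int b; };

int
example (void)
{
  struct pair p = {};	/* GNU empty initializer, gimplified to "p = {}";
			   with p fully covered by replacements this becomes
			   "p$a = 0; p$b = 0;" and the original aggregate
			   store is removed.  */
  return p.a + p.b;
}
#endif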

/* Create a new suitable default definition SSA_NAME and replace all uses of
   SSA with it.  RACC is the access describing the uninitialized part of an
   aggregate that is being loaded.  */

static void
replace_uses_with_default_def_ssa_name (tree ssa, struct access *racc)
{
  tree repl, decl;

  decl = get_unrenamed_access_replacement (racc);

  repl = gimple_default_def (cfun, decl);
  if (!repl)
    {
      repl = make_ssa_name (decl, gimple_build_nop ());
      set_default_def (decl, repl);
    }

  replace_uses_by (ssa, repl);
}

/* Callback of scan_function to process assign statements.  It examines both
   sides of the statement, replaces them with a scalar replacement if there is
   one and generates copying of replacements if scalarized aggregates have been
   used in the assignment.  STMT is a pointer to the assign statement, GSI is
   used to hold generated statements for type conversions and subtree
   copying.  */

static enum scan_assign_result
sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi,
                   void *data ATTRIBUTE_UNUSED)
{
  struct access *lacc, *racc;
  tree lhs, rhs;
  bool modify_this_stmt = false;
  bool force_gimple_rhs = false;
  location_t loc = gimple_location (*stmt);
  gimple_stmt_iterator orig_gsi = *gsi;

  if (!gimple_assign_single_p (*stmt))
    return SRA_SA_NONE;
  lhs = gimple_assign_lhs (*stmt);
  rhs = gimple_assign_rhs1 (*stmt);

  if (TREE_CODE (rhs) == CONSTRUCTOR)
    return sra_modify_constructor_assign (stmt, gsi);

  if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
      || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
      || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
    {
      modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (*stmt),
                                          gsi, false, data);
      modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (*stmt),
                                           gsi, true, data);
      return modify_this_stmt ? SRA_SA_PROCESSED : SRA_SA_NONE;
    }

  lacc = get_access_for_expr (lhs);
  racc = get_access_for_expr (rhs);
  if (!lacc && !racc)
    return SRA_SA_NONE;

  if (lacc && lacc->grp_to_be_replaced)
    {
      lhs = get_access_replacement (lacc);
      gimple_assign_set_lhs (*stmt, lhs);
      modify_this_stmt = true;
      if (lacc->grp_partial_lhs)
        force_gimple_rhs = true;
      sra_stats.exprs++;
    }

  if (racc && racc->grp_to_be_replaced)
    {
      rhs = get_access_replacement (racc);
      modify_this_stmt = true;
      if (racc->grp_partial_lhs)
        force_gimple_rhs = true;
      sra_stats.exprs++;
    }

  if (modify_this_stmt)
    {
      if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
        {
          /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
             ???  This should move to fold_stmt which we simply should
             call after building a VIEW_CONVERT_EXPR here.  */
          if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
              && !access_has_children_p (lacc))
            {
              tree expr = lhs;
              if (build_ref_for_offset (&expr, TREE_TYPE (lhs), 0,
                                        TREE_TYPE (rhs), false))
                {
                  lhs = expr;
                  gimple_assign_set_lhs (*stmt, expr);
                }
            }
          else if (AGGREGATE_TYPE_P (TREE_TYPE (rhs))
                   && !access_has_children_p (racc))
            {
              tree expr = rhs;
              if (build_ref_for_offset (&expr, TREE_TYPE (rhs), 0,
                                        TREE_TYPE (lhs), false))
                rhs = expr;
            }
          if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
            {
              rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
                                     rhs);
              if (is_gimple_reg_type (TREE_TYPE (lhs))
                  && TREE_CODE (lhs) != SSA_NAME)
                force_gimple_rhs = true;
            }
        }
    }

  /* From this point on, the function deals with assignments between
     aggregates when at least one has scalar reductions of some of its
     components.  There are three possible scenarios: 1) both the LHS and the
     RHS have to-be-scalarized components, 2) only the RHS has or 3) only the
     LHS has.

     In the first case, we would like to load the LHS components from the RHS
     components whenever possible.  If that is not possible, we would like to
     read them directly from the RHS (after updating it by storing in it its
     own components).  If there are some necessary unscalarized data in the
     LHS, those will be loaded by the original assignment too.  If neither of
     these cases happens, the original statement can be removed.  Most of this
     is done by load_assign_lhs_subreplacements.

     In the second case, we would like to store all RHS scalarized components
     directly into the LHS and if they cover the aggregate completely, remove
     the statement too.  In the third case, we want the LHS components to be
     loaded directly from the RHS (DSE will remove the original statement if it
     becomes redundant).

     This is a bit complex but manageable when types match and when unions do
     not cause confusion in a way that we cannot really load a component of the
     LHS from the RHS or vice versa (the access representing this level can
     have subaccesses that are accessible only through a different union field
     at a higher level - different from the one used in the examined
     expression).  Unions are fun.

     Therefore, I specially handle a fourth case, happening when there is a
     specific type cast or it is impossible to locate a scalarized subaccess on
     the other side of the expression.  If that happens, I simply "refresh" the
     RHS by storing in it its scalarized components, leave the original
     statement there to do the copying and then load the scalar replacements of
     the LHS.  This is what the first branch does.  */

  if (gimple_has_volatile_ops (*stmt)
      || contains_view_convert_expr_p (rhs)
      || contains_view_convert_expr_p (lhs)
      || (access_has_children_p (racc)
          && !ref_expr_for_all_replacements_p (racc, lhs, racc->offset))
      || (access_has_children_p (lacc)
          && !ref_expr_for_all_replacements_p (lacc, rhs, lacc->offset)))
    {
      if (access_has_children_p (racc))
        generate_subtree_copies (racc->first_child, racc->base, 0, 0, 0,
                                 gsi, false, false);
      if (access_has_children_p (lacc))
        generate_subtree_copies (lacc->first_child, lacc->base, 0, 0, 0,
                                 gsi, true, true);
      sra_stats.separate_lhs_rhs_handling++;
    }
  else
    {
      if (access_has_children_p (lacc) && access_has_children_p (racc))
        {
          gimple_stmt_iterator orig_gsi = *gsi;
          enum unscalarized_data_handling refreshed;

          if (lacc->grp_read && !lacc->grp_covered)
            refreshed = handle_unscalarized_data_in_subtree (racc, lhs, gsi);
          else
            refreshed = SRA_UDH_NONE;

          load_assign_lhs_subreplacements (lacc->first_child, racc,
                                           lacc->offset, racc->offset,
                                           &orig_gsi, gsi, &refreshed, lhs);
          if (refreshed != SRA_UDH_RIGHT)
            {
              gsi_next (gsi);
              unlink_stmt_vdef (*stmt);
              gsi_remove (&orig_gsi, true);
              sra_stats.deleted++;
              return SRA_SA_REMOVED;
            }
        }
      else
        {
          if (racc)
            {
              if (!racc->grp_to_be_replaced && !racc->grp_unscalarized_data)
                {
                  if (racc->first_child)
                    generate_subtree_copies (racc->first_child, lhs,
                                             racc->offset, 0, 0, gsi,
                                             false, false);
                  gcc_assert (*stmt == gsi_stmt (*gsi));
                  if (TREE_CODE (lhs) == SSA_NAME)
                    replace_uses_with_default_def_ssa_name (lhs, racc);

                  unlink_stmt_vdef (*stmt);
                  gsi_remove (gsi, true);
                  sra_stats.deleted++;
                  return SRA_SA_REMOVED;
                }
              else if (racc->first_child)
                generate_subtree_copies (racc->first_child, lhs,
                                         racc->offset, 0, 0, gsi, false, true);
            }
          if (access_has_children_p (lacc))
            generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
                                     0, 0, gsi, true, true);
        }
    }

  /* This gimplification must be done after generate_subtree_copies, lest we
     insert the subtree copies in the middle of the gimplified sequence.  */
  if (force_gimple_rhs)
    rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
                                    true, GSI_SAME_STMT);
  if (gimple_assign_rhs1 (*stmt) != rhs)
    {
      gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
      gcc_assert (*stmt == gsi_stmt (orig_gsi));
    }

  return modify_this_stmt ? SRA_SA_PROCESSED : SRA_SA_NONE;
}
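
/* Editor's illustration, not part of GCC; the common outcome of case 1)
   above for a hypothetical aggregate copy where both sides have been
   scalarized.  */
#if 0
struct pair { int a; int b; };

void
example (struct pair *out, struct pair x)
{
  struct pair y;
  y = x;	/* becomes "y$a = x$a; y$b = x$b;" and, when nothing forces a
		   refresh from the right-hand side, the aggregate copy itself
		   is deleted.  */
  *out = y;
}
#endif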

/* Generate statements initializing scalar replacements of parts of function
   parameters.  */

static void
initialize_parameter_reductions (void)
{
  gimple_stmt_iterator gsi;
  gimple_seq seq = NULL;
  tree parm;

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm;
       parm = TREE_CHAIN (parm))
    {
      VEC (access_p, heap) *access_vec;
      struct access *access;

      if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
        continue;
      access_vec = get_base_access_vector (parm);
      if (!access_vec)
        continue;

      if (!seq)
        {
          seq = gimple_seq_alloc ();
          gsi = gsi_start (seq);
        }

      for (access = VEC_index (access_p, access_vec, 0);
           access;
           access = access->next_grp)
        generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true);
    }

  if (seq)
    gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR), seq);
}

/* The "main" function of intraprocedural SRA passes.  Runs the analysis and if
   it reveals there are components of some aggregates to be scalarized, it runs
   the required transformations.  */
static unsigned int
perform_intra_sra (void)
{
  int ret = 0;
  sra_initialize ();

  if (!find_var_candidates ())
    goto out;

  if (!scan_function (build_access_from_expr, build_accesses_from_assign, NULL,
                      true, NULL))
    goto out;

  if (!analyze_all_variable_accesses ())
    goto out;

  scan_function (sra_modify_expr, sra_modify_assign, NULL, false, NULL);
  initialize_parameter_reductions ();

  statistics_counter_event (cfun, "Scalar replacements created",
                            sra_stats.replacements);
  statistics_counter_event (cfun, "Modified expressions", sra_stats.exprs);
  statistics_counter_event (cfun, "Subtree copy stmts",
                            sra_stats.subtree_copies);
  statistics_counter_event (cfun, "Subreplacement stmts",
                            sra_stats.subreplacements);
  statistics_counter_event (cfun, "Deleted stmts", sra_stats.deleted);
  statistics_counter_event (cfun, "Separate LHS and RHS handling",
                            sra_stats.separate_lhs_rhs_handling);

  ret = TODO_update_ssa;

 out:
  sra_deinitialize ();
  return ret;
}

/* Perform early intraprocedural SRA.  */
static unsigned int
early_intra_sra (void)
{
  sra_mode = SRA_MODE_EARLY_INTRA;
  return perform_intra_sra ();
}

/* Perform "late" intraprocedural SRA.  */
static unsigned int
late_intra_sra (void)
{
  sra_mode = SRA_MODE_INTRA;
  return perform_intra_sra ();
}


static bool
gate_intra_sra (void)
{
  return flag_tree_sra != 0;
}


struct gimple_opt_pass pass_sra_early =
{
 {
  GIMPLE_PASS,
  "esra",                               /* name */
  gate_intra_sra,                       /* gate */
  early_intra_sra,                      /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_SRA,                          /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func
  | TODO_update_ssa
  | TODO_ggc_collect
  | TODO_verify_ssa                     /* todo_flags_finish */
 }
};

struct gimple_opt_pass pass_sra =
{
 {
  GIMPLE_PASS,
  "sra",                                /* name */
  gate_intra_sra,                       /* gate */
  late_intra_sra,                       /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_SRA,                          /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  TODO_update_address_taken,            /* todo_flags_start */
  TODO_dump_func
  | TODO_update_ssa
  | TODO_ggc_collect
  | TODO_verify_ssa                     /* todo_flags_finish */
 }
};


/* Return true iff PARM (which must be a parm_decl) is an unused scalar
   parameter.  */

static bool
is_unused_scalar_param (tree parm)
{
  tree name;
  return (is_gimple_reg (parm)
          && (!(name = gimple_default_def (cfun, parm))
              || has_zero_uses (name)));
}

/* Scan immediate uses of a default definition SSA name of a parameter PARM and
   examine whether there are any direct or otherwise infeasible ones.  If so,
   return true, otherwise return false.  PARM must be a gimple register with a
   non-NULL default definition.  */

static bool
ptr_parm_has_direct_uses (tree parm)
{
  imm_use_iterator ui;
  gimple stmt;
  tree name = gimple_default_def (cfun, parm);
  bool ret = false;

  FOR_EACH_IMM_USE_STMT (stmt, ui, name)
    {
      int uses_ok = 0;
      use_operand_p use_p;

      if (is_gimple_debug (stmt))
        continue;

      /* Valid uses include dereferences on the lhs and the rhs.  */
      if (gimple_has_lhs (stmt))
        {
          tree lhs = gimple_get_lhs (stmt);
          while (handled_component_p (lhs))
            lhs = TREE_OPERAND (lhs, 0);
          if (INDIRECT_REF_P (lhs)
              && TREE_OPERAND (lhs, 0) == name)
            uses_ok++;
        }
      if (gimple_assign_single_p (stmt))
        {
          tree rhs = gimple_assign_rhs1 (stmt);
          while (handled_component_p (rhs))
            rhs = TREE_OPERAND (rhs, 0);
          if (INDIRECT_REF_P (rhs)
              && TREE_OPERAND (rhs, 0) == name)
            uses_ok++;
        }
      else if (is_gimple_call (stmt))
        {
          unsigned i;
          for (i = 0; i < gimple_call_num_args (stmt); ++i)
            {
              tree arg = gimple_call_arg (stmt, i);
              while (handled_component_p (arg))
                arg = TREE_OPERAND (arg, 0);
              if (INDIRECT_REF_P (arg)
                  && TREE_OPERAND (arg, 0) == name)
                uses_ok++;
            }
        }

      /* If the number of valid uses does not match the number of
         uses in this stmt there is an unhandled use.  */
      FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
        --uses_ok;

      if (uses_ok != 0)
        ret = true;

      if (ret)
        BREAK_FROM_IMM_USE_STMT (ui);
    }

  return ret;
}
3030
 
3031
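/* An editorial illustration (not from the original sources; record_ptr is a
   hypothetical function): in

     int f (int *p) { record_ptr (p); return *p; }

   passing P itself to a call is a direct use, so ptr_parm_has_direct_uses
   returns true and P is disqualified, whereas in

     int g (int *p) { return *p + 1; }

   the only use of P is a dereference, the function returns false and P
   remains an IPA-SRA candidate.  */
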
/* Identify candidates for reduction for IPA-SRA based on their type and mark
   them in candidate_bitmap.  Note that these do not necessarily include
   parameters which are unused and thus can be removed.  Return true iff any
   such candidate has been found.  */

static bool
find_param_candidates (void)
{
  tree parm;
  int count = 0;
  bool ret = false;

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm;
       parm = TREE_CHAIN (parm))
    {
      tree type = TREE_TYPE (parm);

      count++;

      if (TREE_THIS_VOLATILE (parm)
          || TREE_ADDRESSABLE (parm)
          || is_va_list_type (type))
        continue;

      if (is_unused_scalar_param (parm))
        {
          ret = true;
          continue;
        }

      if (POINTER_TYPE_P (type))
        {
          type = TREE_TYPE (type);

          if (TREE_CODE (type) == FUNCTION_TYPE
              || TYPE_VOLATILE (type)
              || !is_gimple_reg (parm)
              || is_va_list_type (type)
              || ptr_parm_has_direct_uses (parm))
            continue;
        }
      else if (!AGGREGATE_TYPE_P (type))
        continue;

      if (!COMPLETE_TYPE_P (type)
          || !host_integerp (TYPE_SIZE (type), 1)
          || tree_low_cst (TYPE_SIZE (type), 1) == 0
          || (AGGREGATE_TYPE_P (type)
              && type_internals_preclude_sra_p (type)))
        continue;

      bitmap_set_bit (candidate_bitmap, DECL_UID (parm));
      ret = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Candidate (%d): ", DECL_UID (parm));
          print_generic_expr (dump_file, parm, 0);
          fprintf (dump_file, "\n");
        }
    }

  func_param_count = count;
  return ret;
}

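/* Editorial illustration of the rules above: given

     struct S { int a; int b; };
     int h (struct S s, int *p, __builtin_va_list ap);

   S is a complete, non-volatile aggregate and becomes a candidate, P becomes
   one only if it is a gimple register whose pointer value has no direct
   uses, and AP is rejected by the is_va_list_type checks.  */
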
/* Callback of walk_aliased_vdefs, marks the access passed as DATA as
   maybe_modified. */

static bool
mark_maybe_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
                     void *data)
{
  struct access *repr = (struct access *) data;

  repr->grp_maybe_modified = 1;
  return true;
}

/* Analyze what representatives (in linked lists accessible from
   REPRESENTATIVES) can be modified by side effects of statements in the
   current function.  */

static void
analyze_modified_params (VEC (access_p, heap) *representatives)
{
  int i;

  for (i = 0; i < func_param_count; i++)
    {
      struct access *repr;

      for (repr = VEC_index (access_p, representatives, i);
           repr;
           repr = repr->next_grp)
        {
          struct access *access;
          bitmap visited;
          ao_ref ar;

          if (no_accesses_p (repr))
            continue;
          if (!POINTER_TYPE_P (TREE_TYPE (repr->base))
              || repr->grp_maybe_modified)
            continue;

          ao_ref_init (&ar, repr->expr);
          visited = BITMAP_ALLOC (NULL);
          for (access = repr; access; access = access->next_sibling)
            {
              /* All accesses are read ones, otherwise grp_maybe_modified would
                 be trivially set.  */
              walk_aliased_vdefs (&ar, gimple_vuse (access->stmt),
                                  mark_maybe_modified, repr, &visited);
              if (repr->grp_maybe_modified)
                break;
            }
          BITMAP_FREE (visited);
        }
    }
}

/* Propagate distances in bb_dereferences in the opposite direction to the
   control flow edges, in each step storing the maximum of the current value
   and the minimum of all successors.  These steps are repeated until the table
   stabilizes.  Note that BBs which might terminate the function (according to
   the final_bbs bitmap) are never updated in this way.  */

static void
propagate_dereference_distances (void)
{
  VEC (basic_block, heap) *queue;
  basic_block bb;

  queue = VEC_alloc (basic_block, heap, last_basic_block_for_function (cfun));
  VEC_quick_push (basic_block, queue, ENTRY_BLOCK_PTR);
  FOR_EACH_BB (bb)
    {
      VEC_quick_push (basic_block, queue, bb);
      bb->aux = bb;
    }

  while (!VEC_empty (basic_block, queue))
    {
      edge_iterator ei;
      edge e;
      bool change = false;
      int i;

      bb = VEC_pop (basic_block, queue);
      bb->aux = NULL;

      if (bitmap_bit_p (final_bbs, bb->index))
        continue;

      for (i = 0; i < func_param_count; i++)
        {
          int idx = bb->index * func_param_count + i;
          bool first = true;
          HOST_WIDE_INT inh = 0;

          FOR_EACH_EDGE (e, ei, bb->succs)
          {
            int succ_idx = e->dest->index * func_param_count + i;

            if (e->dest == EXIT_BLOCK_PTR)
              continue;

            if (first)
              {
                first = false;
                inh = bb_dereferences [succ_idx];
              }
            else if (bb_dereferences [succ_idx] < inh)
              inh = bb_dereferences [succ_idx];
          }

          if (!first && bb_dereferences[idx] < inh)
            {
              bb_dereferences[idx] = inh;
              change = true;
            }
        }

      if (change && !bitmap_bit_p (final_bbs, bb->index))
        FOR_EACH_EDGE (e, ei, bb->preds)
          {
            if (e->src->aux)
              continue;

            e->src->aux = e->src;
            VEC_quick_push (basic_block, queue, e->src);
          }
    }

  VEC_free (basic_block, heap, queue);
}

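/* Editorial worked example: in a diamond CFG where BB2 branches to BB3 and
   BB4 which both join in BB5, and parameter 0 is dereferenced up to 32 bits
   in BB3 but up to 64 bits in BB4, one propagation step gives BB2 the value
   max (0, min (32, 64)) = 32, i.e. only the first 32 bits are known to be
   dereferenced on every path from BB2 and only that much may later be
   loaded by callers.  */
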
/* Dump a dereferences TABLE with heading STR to file F.  */

static void
dump_dereferences_table (FILE *f, const char *str, HOST_WIDE_INT *table)
{
  basic_block bb;

  fprintf (f, "%s", str);
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    {
      fprintf (f, "%4i  %i   ", bb->index, bitmap_bit_p (final_bbs, bb->index));
      if (bb != EXIT_BLOCK_PTR)
        {
          int i;
          for (i = 0; i < func_param_count; i++)
            {
              int idx = bb->index * func_param_count + i;
              fprintf (f, " %4" HOST_WIDE_INT_PRINT "d", table[idx]);
            }
        }
      fprintf (f, "\n");
    }
  fprintf (f, "\n");
}

/* Determine which (parts of) parameters passed by reference that are not
   assigned to are not certainly dereferenced in this function and thus the
   dereferencing cannot be safely moved to the caller without potentially
   introducing a segfault.  Mark such REPRESENTATIVES as
   grp_not_necessarilly_dereferenced.

   Rather than a simple boolean, the maximum dereferenced "distance," i.e. the
   offset plus size of the accessed part, is calculated for each pointer
   parameter, to handle cases when only a fraction of the whole aggregate is
   allocated (see testsuite/gcc.c-torture/execute/ipa-sra-2.c for an example).

   The maximum dereference distances for each pointer parameter and BB are
   already stored in bb_dereferences.  This routine simply propagates these
   values upwards by propagate_dereference_distances and then compares the
   distances of individual parameters in the ENTRY BB to the equivalent
   distances of each representative of a (fraction of a) parameter.  */

static void
analyze_caller_dereference_legality (VEC (access_p, heap) *representatives)
{
  int i;

  if (dump_file && (dump_flags & TDF_DETAILS))
    dump_dereferences_table (dump_file,
                             "Dereference table before propagation:\n",
                             bb_dereferences);

  propagate_dereference_distances ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    dump_dereferences_table (dump_file,
                             "Dereference table after propagation:\n",
                             bb_dereferences);

  for (i = 0; i < func_param_count; i++)
    {
      struct access *repr = VEC_index (access_p, representatives, i);
      int idx = ENTRY_BLOCK_PTR->index * func_param_count + i;

      if (!repr || no_accesses_p (repr))
        continue;

      do
        {
          if ((repr->offset + repr->size) > bb_dereferences[idx])
            repr->grp_not_necessarilly_dereferenced = 1;
          repr = repr->next_grp;
        }
      while (repr);
    }
}

/* Return the representative access for the parameter declaration PARM if it is
   a scalar passed by reference which is not written to and the pointer value
   is not used directly.  Thus, if it is legal to dereference it in the caller
   and we can rule out modifications through aliases, such a parameter should
   be turned into one passed by value.  Return NULL otherwise.  */

static struct access *
unmodified_by_ref_scalar_representative (tree parm)
{
  int i, access_count;
  struct access *repr;
  VEC (access_p, heap) *access_vec;

  access_vec = get_base_access_vector (parm);
  gcc_assert (access_vec);
  repr = VEC_index (access_p, access_vec, 0);
  if (repr->write)
    return NULL;
  repr->group_representative = repr;

  access_count = VEC_length (access_p, access_vec);
  for (i = 1; i < access_count; i++)
    {
      struct access *access = VEC_index (access_p, access_vec, i);
      if (access->write)
        return NULL;
      access->group_representative = repr;
      access->next_sibling = repr->next_sibling;
      repr->next_sibling = access;
    }

  repr->grp_read = 1;
  repr->grp_scalar_ptr = 1;
  return repr;
}

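/* Editorial illustration: a function such as

     int get (const int *p) { return *p; }

   never writes through P and never uses the pointer value itself, so the
   representative built above ultimately lets IPA-SRA turn the parameter into
   the moral equivalent of

     int get (int p_val) { return p_val; }

   provided analyze_modified_params and analyze_caller_dereference_legality
   do not object.  */
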
/* Return true iff this access precludes IPA-SRA of the parameter it is
   associated with. */

static bool
access_precludes_ipa_sra_p (struct access *access)
{
  /* Avoid issues such as the second simple testcase in PR 42025.  The problem
     is an incompatible assignment in a call statement (and possibly even in
     asm statements).  This can be relaxed by using a new temporary but only
     for non-TREE_ADDRESSABLE types and is probably not worth the complexity.
     (In intraprocedural SRA we deal with this by keeping the old aggregate
     around, something we cannot do in IPA-SRA.)  */
  if (access->write
      && (is_gimple_call (access->stmt)
          || gimple_code (access->stmt) == GIMPLE_ASM))
    return true;

  return false;
}


/* Sort collected accesses for parameter PARM, identify representatives for
   each accessed region and link them together.  Return NULL if there are
   different but overlapping accesses, return the special pointer value meaning
   there are no accesses for this parameter if that is the case, and return the
   first representative otherwise.  Set *RO_GRP if there is a group of accesses
   with only read (i.e. no write) accesses.  */

static struct access *
splice_param_accesses (tree parm, bool *ro_grp)
{
  int i, j, access_count, group_count;
  int agg_size, total_size = 0;
  struct access *access, *res, **prev_acc_ptr = &res;
  VEC (access_p, heap) *access_vec;

  access_vec = get_base_access_vector (parm);
  if (!access_vec)
    return &no_accesses_representant;
  access_count = VEC_length (access_p, access_vec);

  qsort (VEC_address (access_p, access_vec), access_count, sizeof (access_p),
         compare_access_positions);

  i = 0;
  total_size = 0;
  group_count = 0;
  while (i < access_count)
    {
      bool modification;
      access = VEC_index (access_p, access_vec, i);
      modification = access->write;
      if (access_precludes_ipa_sra_p (access))
        return NULL;

      /* Access is about to become group representative unless we find some
         nasty overlap which would preclude us from breaking this parameter
         apart. */

      j = i + 1;
      while (j < access_count)
        {
          struct access *ac2 = VEC_index (access_p, access_vec, j);
          if (ac2->offset != access->offset)
            {
              /* All or nothing law for parameters. */
              if (access->offset + access->size > ac2->offset)
                return NULL;
              else
                break;
            }
          else if (ac2->size != access->size)
            return NULL;

          if (access_precludes_ipa_sra_p (ac2))
            return NULL;

          modification |= ac2->write;
          ac2->group_representative = access;
          ac2->next_sibling = access->next_sibling;
          access->next_sibling = ac2;
          j++;
        }

      group_count++;
      access->grp_maybe_modified = modification;
      if (!modification)
        *ro_grp = true;
      *prev_acc_ptr = access;
      prev_acc_ptr = &access->next_grp;
      total_size += access->size;
      i = j;
    }

  if (POINTER_TYPE_P (TREE_TYPE (parm)))
    agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
  else
    agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
  if (total_size >= agg_size)
    return NULL;

  gcc_assert (group_count > 0);
  return res;
}

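/* Editorial illustration of the all-or-nothing law above: with

     struct S { int a; int b; };

   passed via a pointer, accesses to p->a (offset 0, size 32) and p->b
   (offset 32, size 32) form two disjoint groups and splice successfully,
   but adding an access covering the whole *p (offset 0, size 64) shares the
   offset of p->a without sharing its size, so splice_param_accesses returns
   NULL and the parameter is left intact.  */
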
/* Decide whether parameters with representative accesses given by REPR should
   be reduced into components.  */

static int
decide_one_param_reduction (struct access *repr)
{
  int total_size, cur_parm_size, agg_size, new_param_count, parm_size_limit;
  bool by_ref;
  tree parm;

  parm = repr->base;
  cur_parm_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
  gcc_assert (cur_parm_size > 0);

  if (POINTER_TYPE_P (TREE_TYPE (parm)))
    {
      by_ref = true;
      agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
    }
  else
    {
      by_ref = false;
      agg_size = cur_parm_size;
    }

  if (dump_file)
    {
      struct access *acc;
      fprintf (dump_file, "Evaluating PARAM group sizes for ");
      print_generic_expr (dump_file, parm, 0);
      fprintf (dump_file, " (UID: %u): \n", DECL_UID (parm));
      for (acc = repr; acc; acc = acc->next_grp)
        dump_access (dump_file, acc, true);
    }

  total_size = 0;
  new_param_count = 0;

  for (; repr; repr = repr->next_grp)
    {
      gcc_assert (parm == repr->base);
      new_param_count++;

      if (!by_ref || (!repr->grp_maybe_modified
                      && !repr->grp_not_necessarilly_dereferenced))
        total_size += repr->size;
      else
        total_size += cur_parm_size;
    }

  gcc_assert (new_param_count > 0);

  if (optimize_function_for_size_p (cfun))
    parm_size_limit = cur_parm_size;
  else
    parm_size_limit = (PARAM_VALUE (PARAM_IPA_SRA_PTR_GROWTH_FACTOR)
                       * cur_parm_size);

  if (total_size < agg_size
      && total_size <= parm_size_limit)
    {
      if (dump_file)
        fprintf (dump_file, "    ....will be split into %i components\n",
                 new_param_count);
      return new_param_count;
    }
  else
    return 0;
}

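/* Editorial arithmetic sketch: for a by-reference parameter with a 64-bit
   pointer and a 128-bit pointed-to aggregate, two unmodified, certainly
   dereferenced 32-bit groups give total_size = 64.  With the default
   --param ipa-sra-ptr-growth-factor=2, parm_size_limit is 2 * 64 = 128, so
   both total_size < agg_size and total_size <= parm_size_limit hold and the
   parameter is split into two components.  */
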
/* The order of the following enums is important, we need to do extra work for
   UNUSED_PARAMS, BY_VAL_ACCESSES and UNMODIF_BY_REF_ACCESSES.  */
enum ipa_splicing_result { NO_GOOD_ACCESS, UNUSED_PARAMS, BY_VAL_ACCESSES,
                          MODIF_BY_REF_ACCESSES, UNMODIF_BY_REF_ACCESSES };

/* Identify representatives of all accesses to all candidate parameters for
   IPA-SRA.  Return result based on what representatives have been found. */

static enum ipa_splicing_result
splice_all_param_accesses (VEC (access_p, heap) **representatives)
{
  enum ipa_splicing_result result = NO_GOOD_ACCESS;
  tree parm;
  struct access *repr;

  *representatives = VEC_alloc (access_p, heap, func_param_count);

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm;
       parm = TREE_CHAIN (parm))
    {
      if (is_unused_scalar_param (parm))
        {
          VEC_quick_push (access_p, *representatives,
                          &no_accesses_representant);
          if (result == NO_GOOD_ACCESS)
            result = UNUSED_PARAMS;
        }
      else if (POINTER_TYPE_P (TREE_TYPE (parm))
               && is_gimple_reg_type (TREE_TYPE (TREE_TYPE (parm)))
               && bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
        {
          repr = unmodified_by_ref_scalar_representative (parm);
          VEC_quick_push (access_p, *representatives, repr);
          if (repr)
            result = UNMODIF_BY_REF_ACCESSES;
        }
      else if (bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
        {
          bool ro_grp = false;
          repr = splice_param_accesses (parm, &ro_grp);
          VEC_quick_push (access_p, *representatives, repr);

          if (repr && !no_accesses_p (repr))
            {
              if (POINTER_TYPE_P (TREE_TYPE (parm)))
                {
                  if (ro_grp)
                    result = UNMODIF_BY_REF_ACCESSES;
                  else if (result < MODIF_BY_REF_ACCESSES)
                    result = MODIF_BY_REF_ACCESSES;
                }
              else if (result < BY_VAL_ACCESSES)
                result = BY_VAL_ACCESSES;
            }
          else if (no_accesses_p (repr) && (result == NO_GOOD_ACCESS))
            result = UNUSED_PARAMS;
        }
      else
        VEC_quick_push (access_p, *representatives, NULL);
    }

  if (result == NO_GOOD_ACCESS)
    {
      VEC_free (access_p, heap, *representatives);
      *representatives = NULL;
      return NO_GOOD_ACCESS;
    }

  return result;
}

/* Return the index of BASE in PARMS.  Abort if it is not found.  */

static inline int
get_param_index (tree base, VEC(tree, heap) *parms)
{
  int i, len;

  len = VEC_length (tree, parms);
  for (i = 0; i < len; i++)
    if (VEC_index (tree, parms, i) == base)
      return i;
  gcc_unreachable ();
}

/* Convert the decisions made at the representative level into compact
   parameter adjustments.  REPRESENTATIVES are pointers to the first
   representatives of each parameter's accesses, ADJUSTMENTS_COUNT is the
   expected final number of adjustments.  */

static ipa_parm_adjustment_vec
turn_representatives_into_adjustments (VEC (access_p, heap) *representatives,
                                       int adjustments_count)
{
  VEC (tree, heap) *parms;
  ipa_parm_adjustment_vec adjustments;
  tree parm;
  int i;

  gcc_assert (adjustments_count > 0);
  parms = ipa_get_vector_of_formal_parms (current_function_decl);
  adjustments = VEC_alloc (ipa_parm_adjustment_t, heap, adjustments_count);
  parm = DECL_ARGUMENTS (current_function_decl);
  for (i = 0; i < func_param_count; i++, parm = TREE_CHAIN (parm))
    {
      struct access *repr = VEC_index (access_p, representatives, i);

      if (!repr || no_accesses_p (repr))
        {
          struct ipa_parm_adjustment *adj;

          adj = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
          memset (adj, 0, sizeof (*adj));
          adj->base_index = get_param_index (parm, parms);
          adj->base = parm;
          if (!repr)
            adj->copy_param = 1;
          else
            adj->remove_param = 1;
        }
      else
        {
          struct ipa_parm_adjustment *adj;
          int index = get_param_index (parm, parms);

          for (; repr; repr = repr->next_grp)
            {
              adj = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
              memset (adj, 0, sizeof (*adj));
              gcc_assert (repr->base == parm);
              adj->base_index = index;
              adj->base = repr->base;
              adj->type = repr->type;
              adj->offset = repr->offset;
              adj->by_ref = (POINTER_TYPE_P (TREE_TYPE (repr->base))
                             && (repr->grp_maybe_modified
                                 || repr->grp_not_necessarilly_dereferenced));
            }
        }
    }
  VEC_free (tree, heap, parms);
  return adjustments;
}

/* Analyze the collected accesses and produce a plan for what to do with the
   parameters in the form of adjustments, NULL meaning nothing.  */

static ipa_parm_adjustment_vec
analyze_all_param_acesses (void)
{
  enum ipa_splicing_result repr_state;
  bool proceed = false;
  int i, adjustments_count = 0;
  VEC (access_p, heap) *representatives;
  ipa_parm_adjustment_vec adjustments;

  repr_state = splice_all_param_accesses (&representatives);
  if (repr_state == NO_GOOD_ACCESS)
    return NULL;

  /* If there are any parameters passed by reference which are not modified
     directly, we need to check whether they can be modified indirectly.  */
  if (repr_state == UNMODIF_BY_REF_ACCESSES)
    {
      analyze_caller_dereference_legality (representatives);
      analyze_modified_params (representatives);
    }

  for (i = 0; i < func_param_count; i++)
    {
      struct access *repr = VEC_index (access_p, representatives, i);

      if (repr && !no_accesses_p (repr))
        {
          if (repr->grp_scalar_ptr)
            {
              adjustments_count++;
              if (repr->grp_not_necessarilly_dereferenced
                  || repr->grp_maybe_modified)
                VEC_replace (access_p, representatives, i, NULL);
              else
                {
                  proceed = true;
                  sra_stats.scalar_by_ref_to_by_val++;
                }
            }
          else
            {
              int new_components = decide_one_param_reduction (repr);

              if (new_components == 0)
                {
                  VEC_replace (access_p, representatives, i, NULL);
                  adjustments_count++;
                }
              else
                {
                  adjustments_count += new_components;
                  sra_stats.aggregate_params_reduced++;
                  sra_stats.param_reductions_created += new_components;
                  proceed = true;
                }
            }
        }
      else
        {
          if (no_accesses_p (repr))
            {
              proceed = true;
              sra_stats.deleted_unused_parameters++;
            }
          adjustments_count++;
        }
    }

  if (!proceed && dump_file)
    fprintf (dump_file, "NOT proceeding to change params.\n");

  if (proceed)
    adjustments = turn_representatives_into_adjustments (representatives,
                                                         adjustments_count);
  else
    adjustments = NULL;

  VEC_free (access_p, heap, representatives);
  return adjustments;
}

/* If a parameter replacement identified by ADJ does not yet exist in the form
   of a declaration, create it and record it, otherwise return the previously
   created one.  */

static tree
get_replaced_param_substitute (struct ipa_parm_adjustment *adj)
{
  tree repl;
  if (!adj->new_ssa_base)
    {
      char *pretty_name = make_fancy_name (adj->base);

      repl = create_tmp_var (TREE_TYPE (adj->base), "ISR");
      if (TREE_CODE (TREE_TYPE (repl)) == COMPLEX_TYPE
          || TREE_CODE (TREE_TYPE (repl)) == VECTOR_TYPE)
        DECL_GIMPLE_REG_P (repl) = 1;
      DECL_NAME (repl) = get_identifier (pretty_name);
      obstack_free (&name_obstack, pretty_name);

      get_var_ann (repl);
      add_referenced_var (repl);
      adj->new_ssa_base = repl;
    }
  else
    repl = adj->new_ssa_base;
  return repl;
}

/* Find the first adjustment for a particular parameter BASE in a vector of
   ADJUSTMENTS which is not a copy_param.  Return NULL if there is no such
   adjustment. */

static struct ipa_parm_adjustment *
get_adjustment_for_base (ipa_parm_adjustment_vec adjustments, tree base)
{
  int i, len;

  len = VEC_length (ipa_parm_adjustment_t, adjustments);
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;

      adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
      if (!adj->copy_param && adj->base == base)
        return adj;
    }

  return NULL;
}

/* Callback for scan_function.  If the statement STMT defines an SSA_NAME of a
   parameter which is to be removed because its value is not used, replace the
   SSA_NAME with one relating to a created VAR_DECL, replace all of its uses
   too and return true (update_stmt is then issued for the statement by the
   caller).  DATA is a pointer to an adjustments vector.  */

static bool
replace_removed_params_ssa_names (gimple stmt, void *data)
{
  VEC (ipa_parm_adjustment_t, heap) *adjustments;
  struct ipa_parm_adjustment *adj;
  tree lhs, decl, repl, name;

  adjustments = (VEC (ipa_parm_adjustment_t, heap) *) data;
  if (gimple_code (stmt) == GIMPLE_PHI)
    lhs = gimple_phi_result (stmt);
  else if (is_gimple_assign (stmt))
    lhs = gimple_assign_lhs (stmt);
  else if (is_gimple_call (stmt))
    lhs = gimple_call_lhs (stmt);
  else
    gcc_unreachable ();

  if (TREE_CODE (lhs) != SSA_NAME)
    return false;
  decl = SSA_NAME_VAR (lhs);
  if (TREE_CODE (decl) != PARM_DECL)
    return false;

  adj = get_adjustment_for_base (adjustments, decl);
  if (!adj)
    return false;

  repl = get_replaced_param_substitute (adj);
  name = make_ssa_name (repl, stmt);

  if (dump_file)
    {
      fprintf (dump_file, "replacing an SSA name of a removed param ");
      print_generic_expr (dump_file, lhs, 0);
      fprintf (dump_file, " with ");
      print_generic_expr (dump_file, name, 0);
      fprintf (dump_file, "\n");
    }

  if (is_gimple_assign (stmt))
    gimple_assign_set_lhs (stmt, name);
  else if (is_gimple_call (stmt))
    gimple_call_set_lhs (stmt, name);
  else
    gimple_phi_set_result (stmt, name);

  replace_uses_by (lhs, name);
  release_ssa_name (lhs);
  return true;
}

/* Callback for scan_function and helper to sra_ipa_modify_assign.  If the
   expression *EXPR should be replaced by a reduction of a parameter, do so.
   DATA is a pointer to a vector of adjustments.  DONT_CONVERT specifies
   whether the function should care about type incompatibility between the
   current and new expressions.  If it is true, the function will leave
   incompatibility issues to the caller.

   When called directly by scan_function, DONT_CONVERT is true when the EXPR is
   a write (LHS) expression.  */

static bool
sra_ipa_modify_expr (tree *expr, gimple_stmt_iterator *gsi ATTRIBUTE_UNUSED,
                     bool dont_convert, void *data)
{
  ipa_parm_adjustment_vec adjustments;
  int i, len;
  struct ipa_parm_adjustment *adj, *cand = NULL;
  HOST_WIDE_INT offset, size, max_size;
  tree base, src;

  adjustments = (VEC (ipa_parm_adjustment_t, heap) *) data;
  len = VEC_length (ipa_parm_adjustment_t, adjustments);

  if (TREE_CODE (*expr) == BIT_FIELD_REF
      || TREE_CODE (*expr) == IMAGPART_EXPR
      || TREE_CODE (*expr) == REALPART_EXPR)
    {
      expr = &TREE_OPERAND (*expr, 0);
      dont_convert = false;
    }

  base = get_ref_base_and_extent (*expr, &offset, &size, &max_size);
  if (!base || size == -1 || max_size == -1)
    return false;

  if (INDIRECT_REF_P (base))
    base = TREE_OPERAND (base, 0);

  base = get_ssa_base_param (base);
  if (!base || TREE_CODE (base) != PARM_DECL)
    return false;

  for (i = 0; i < len; i++)
    {
      adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);

      if (adj->base == base
          && (adj->offset == offset || adj->remove_param))
        {
          cand = adj;
          break;
        }
    }
  if (!cand || cand->copy_param || cand->remove_param)
    return false;

  if (cand->by_ref)
    {
      tree folded;
      src = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (cand->reduction)),
                    cand->reduction);
      folded = gimple_fold_indirect_ref (src);
      if (folded)
        src = folded;
    }
  else
    src = cand->reduction;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "About to replace expr ");
      print_generic_expr (dump_file, *expr, 0);
      fprintf (dump_file, " with ");
      print_generic_expr (dump_file, src, 0);
      fprintf (dump_file, "\n");
    }

  if (!dont_convert
      && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
    {
      tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
      *expr = vce;
    }
  else
    *expr = src;
  return true;
}

/* Callback for scan_function to process assign statements.  Performs
   essentially the same function as sra_ipa_modify_expr.  */

static enum scan_assign_result
sra_ipa_modify_assign (gimple *stmt_ptr, gimple_stmt_iterator *gsi, void *data)
{
  gimple stmt = *stmt_ptr;
  tree *lhs_p, *rhs_p;
  bool any;

  if (!gimple_assign_single_p (stmt))
    return SRA_SA_NONE;

  rhs_p = gimple_assign_rhs1_ptr (stmt);
  lhs_p = gimple_assign_lhs_ptr (stmt);

  any = sra_ipa_modify_expr (rhs_p, gsi, true, data);
  any |= sra_ipa_modify_expr (lhs_p, gsi, true, data);
  if (any)
    {
      tree new_rhs = NULL_TREE;

      if (!useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
        {
          if (TREE_CODE (*rhs_p) == CONSTRUCTOR)
            {
              /* V_C_Es of constructors can cause trouble (PR 42714).  */
              if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
                *rhs_p = fold_convert (TREE_TYPE (*lhs_p), integer_zero_node);
              else
                *rhs_p = build_constructor (TREE_TYPE (*lhs_p), 0);
            }
          else
            new_rhs = fold_build1_loc (gimple_location (stmt),
                                       VIEW_CONVERT_EXPR, TREE_TYPE (*lhs_p),
                                       *rhs_p);
        }
      else if (REFERENCE_CLASS_P (*rhs_p)
               && is_gimple_reg_type (TREE_TYPE (*lhs_p))
               && !is_gimple_reg (*lhs_p))
        /* This can happen when an assignment in between two single field
           structures is turned into an assignment in between two pointers to
           scalars (PR 42237).  */
        new_rhs = *rhs_p;

      if (new_rhs)
        {
          tree tmp = force_gimple_operand_gsi (gsi, new_rhs, true, NULL_TREE,
                                               true, GSI_SAME_STMT);

          gimple_assign_set_rhs_from_tree (gsi, tmp);
        }

      return SRA_SA_PROCESSED;
    }

  return SRA_SA_NONE;
}

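/* Editorial illustration for the fix-ups above: if reducing both sides of

     *dst = *src;

   between two single-field structures leaves a scalar on one side and an
   aggregate on the other, the RHS is either wrapped in a VIEW_CONVERT_EXPR
   or forced through a temporary when the LHS is not a gimple register (the
   PR 42237 situation), and constructors are re-materialized instead of
   view-converted (PR 42714).  */
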
/* Call gimple_debug_bind_reset_value on all debug statements describing
   gimple register parameters that are being removed or replaced.  */

static void
sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
{
  int i, len;

  len = VEC_length (ipa_parm_adjustment_t, adjustments);
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      imm_use_iterator ui;
      gimple stmt;
      tree name;

      adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
      if (adj->copy_param || !is_gimple_reg (adj->base))
        continue;
      name = gimple_default_def (cfun, adj->base);
      if (!name)
        continue;
      FOR_EACH_IMM_USE_STMT (stmt, ui, name)
        {
          /* All other users must have been removed by scan_function.  */
          gcc_assert (is_gimple_debug (stmt));
          gimple_debug_bind_reset_value (stmt);
          update_stmt (stmt);
        }
    }
}

/* Return true iff all callers have at least as many actual arguments as there
   are formal parameters in the current function.  */

static bool
all_callers_have_enough_arguments_p (struct cgraph_node *node)
{
  struct cgraph_edge *cs;
  for (cs = node->callers; cs; cs = cs->next_caller)
    if (!callsite_has_enough_arguments_p (cs->call_stmt))
      return false;

  return true;
}


/* Convert all callers of NODE to pass parameters as given in ADJUSTMENTS.  */

static void
convert_callers (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
{
  tree old_cur_fndecl = current_function_decl;
  struct cgraph_edge *cs;
  bitmap recomputed_callers = BITMAP_ALLOC (NULL);

  for (cs = node->callers; cs; cs = cs->next_caller)
    {
      current_function_decl = cs->caller->decl;
      push_cfun (DECL_STRUCT_FUNCTION (cs->caller->decl));

      if (dump_file)
        fprintf (dump_file, "Adjusting call (%i -> %i) %s -> %s\n",
                 cs->caller->uid, cs->callee->uid,
                 cgraph_node_name (cs->caller),
                 cgraph_node_name (cs->callee));

      ipa_modify_call_arguments (cs, cs->call_stmt, adjustments);

      pop_cfun ();
    }

  for (cs = node->callers; cs; cs = cs->next_caller)
    if (cs->caller != node
        && !bitmap_bit_p (recomputed_callers, cs->caller->uid))
      {
        compute_inline_parameters (cs->caller);
        bitmap_set_bit (recomputed_callers, cs->caller->uid);
      }
  BITMAP_FREE (recomputed_callers);

  current_function_decl = old_cur_fndecl;
  return;
}

/* Perform all the modifications required in IPA-SRA for NODE to have
   parameters as given in ADJUSTMENTS.  */

static void
modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
{
  struct cgraph_node *new_node;
  struct cgraph_edge *cs;
  VEC (cgraph_edge_p, heap) *redirect_callers;
  int node_callers;

  node_callers = 0;
  for (cs = node->callers; cs != NULL; cs = cs->next_caller)
    node_callers++;
  redirect_callers = VEC_alloc (cgraph_edge_p, heap, node_callers);
  for (cs = node->callers; cs != NULL; cs = cs->next_caller)
    VEC_quick_push (cgraph_edge_p, redirect_callers, cs);

  rebuild_cgraph_edges ();
  pop_cfun ();
  current_function_decl = NULL_TREE;

  new_node = cgraph_function_versioning (node, redirect_callers, NULL, NULL);
  current_function_decl = new_node->decl;
  push_cfun (DECL_STRUCT_FUNCTION (new_node->decl));

  ipa_modify_formal_parameters (current_function_decl, adjustments, "ISRA");
  scan_function (sra_ipa_modify_expr, sra_ipa_modify_assign,
                 replace_removed_params_ssa_names, false, adjustments);
  sra_ipa_reset_debug_stmts (adjustments);
  convert_callers (new_node, adjustments);
  cgraph_make_node_local (new_node);
  return;
}

/* Return false if the function is apparently unsuitable for IPA-SRA based on
   its attributes, return true otherwise.  NODE is the cgraph node of the
   current function.  */

static bool
ipa_sra_preliminary_function_checks (struct cgraph_node *node)
{
  if (!cgraph_node_can_be_local_p (node))
    {
      if (dump_file)
        fprintf (dump_file, "Function not local to this compilation unit.\n");
      return false;
    }

  if (!tree_versionable_function_p (node->decl))
    {
      if (dump_file)
        fprintf (dump_file, "Function is not versionable.\n");
      return false;
    }

  if (DECL_VIRTUAL_P (current_function_decl))
    {
      if (dump_file)
        fprintf (dump_file, "Function is a virtual method.\n");
      return false;
    }

  if ((DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
      && node->global.size >= MAX_INLINE_INSNS_AUTO)
    {
      if (dump_file)
        fprintf (dump_file, "Function too big to be made truly local.\n");
      return false;
    }

  if (!node->callers)
    {
      if (dump_file)
        fprintf (dump_file,
                 "Function has no callers in this compilation unit.\n");
      return false;
    }

  if (cfun->stdarg)
    {
      if (dump_file)
        fprintf (dump_file, "Function uses stdarg.\n");
      return false;
    }

  if (TYPE_ATTRIBUTES (TREE_TYPE (node->decl)))
    return false;

  return true;
}

/* Perform early interprocedural SRA.  */

static unsigned int
ipa_early_sra (void)
{
  struct cgraph_node *node = cgraph_node (current_function_decl);
  ipa_parm_adjustment_vec adjustments;
  int ret = 0;

  if (!ipa_sra_preliminary_function_checks (node))
    return 0;

  sra_initialize ();
  sra_mode = SRA_MODE_EARLY_IPA;

  if (!find_param_candidates ())
    {
      if (dump_file)
        fprintf (dump_file, "Function has no IPA-SRA candidates.\n");
      goto simple_out;
    }

  if (!all_callers_have_enough_arguments_p (node))
    {
      if (dump_file)
        fprintf (dump_file, "There are callers with an insufficient number "
                 "of arguments.\n");
      goto simple_out;
    }

  bb_dereferences = XCNEWVEC (HOST_WIDE_INT,
                              func_param_count
                              * last_basic_block_for_function (cfun));
  final_bbs = BITMAP_ALLOC (NULL);

  scan_function (build_access_from_expr, build_accesses_from_assign,
                 NULL, true, NULL);
  if (encountered_apply_args)
    {
      if (dump_file)
        fprintf (dump_file, "Function calls __builtin_apply_args().\n");
      goto out;
    }

  if (encountered_unchangable_recursive_call)
    {
      if (dump_file)
        fprintf (dump_file, "Function calls itself with an insufficient "
                 "number of arguments.\n");
      goto out;
    }

  adjustments = analyze_all_param_acesses ();
  if (!adjustments)
    goto out;
  if (dump_file)
    ipa_dump_param_adjustments (dump_file, adjustments, current_function_decl);

  modify_function (node, adjustments);
  VEC_free (ipa_parm_adjustment_t, heap, adjustments);
  ret = TODO_update_ssa;

  statistics_counter_event (cfun, "Unused parameters deleted",
                            sra_stats.deleted_unused_parameters);
  statistics_counter_event (cfun, "Scalar parameters converted to by-value",
                            sra_stats.scalar_by_ref_to_by_val);
  statistics_counter_event (cfun, "Aggregate parameters broken up",
                            sra_stats.aggregate_params_reduced);
  statistics_counter_event (cfun, "Aggregate parameter components created",
                            sra_stats.param_reductions_created);

 out:
  BITMAP_FREE (final_bbs);
  free (bb_dereferences);
 simple_out:
  sra_deinitialize ();
  return ret;
}

/* Return true if early IPA-SRA shall be performed.  */
static bool
ipa_early_sra_gate (void)
{
  return flag_ipa_sra;
}

struct gimple_opt_pass pass_early_ipa_sra =
{
 {
  GIMPLE_PASS,
  "eipa_sra",                           /* name */
  ipa_early_sra_gate,                   /* gate */
  ipa_early_sra,                        /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_IPA_SRA,                           /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_dump_cgraph     /* todo_flags_finish */
 }
};

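/* Editorial usage note: this pass is gated on flag_ipa_sra (-fipa-sra, on by
   default at -O2 and above in this GCC release) and, per the pass name
   above, its work can be inspected with -fdump-tree-eipa_sra-details, which
   prints the candidate, splicing and adjustment dumps produced throughout
   this file.  */
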
 
