/* tree-sra.c, revision 714, from the openrisc or1k-gcc tree in the OpenCores
   Subversion repository: https://opencores.org/ocsvn/openrisc/openrisc/trunk
   [openrisc/trunk/gnu-dev/or1k-gcc/gcc/tree-sra.c]  */
/* Scalar Replacement of Aggregates (SRA) converts some structure
   references into scalar references, exposing them to the scalar
   optimizers.
   Copyright (C) 2008, 2009, 2010, 2011 Free Software Foundation, Inc.
   Contributed by Martin Jambor <mjambor@suse.cz>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file implements Scalar Replacement of Aggregates (SRA).  SRA is run
   twice, once in the early stages of compilation (early SRA) and once in the
   late stages (late SRA).  The aim of both is to turn references to scalar
   parts of aggregates into uses of independent scalar variables.

   The two passes are nearly identical; the only difference is that early SRA
   does not scalarize unions which are used as the result in a GIMPLE_RETURN
   statement because together with inlining this can lead to weird type
   conversions.

   Both passes operate in four stages:

   1. The declarations that have properties which make them candidates for
      scalarization are identified in function find_var_candidates().  The
      candidates are stored in candidate_bitmap.

   2. The function body is scanned.  In the process, declarations which are
      used in a manner that prevents their scalarization are removed from the
      candidate bitmap.  More importantly, for every access into an aggregate,
      an access structure (struct access) is created by create_access() and
      stored in a vector associated with the aggregate.  Among other
      information, the aggregate declaration, the offset and size of the access
      and its type are stored in the structure.

      On a related note, assign_link structures are created for every assign
      statement between candidate aggregates and attached to the related
      accesses.

   3. The vectors of accesses are analyzed.  They are first sorted according to
      their offset and size and then scanned for partially overlapping accesses
      (i.e. those which overlap but one is not entirely within another).  Such
      an access disqualifies the whole aggregate from being scalarized.

      If there is no such inhibiting overlap, a representative access structure
      is chosen for every unique combination of offset and size.  Afterwards,
      the pass builds a set of trees from these structures, in which children
      of an access are within their parent (in terms of offset and size).

      Then accesses are propagated whenever possible (i.e. in cases when doing
      so does not create a partially overlapping access) across assign_links
      from the right hand side to the left hand side.

      Then the set of trees for each declaration is traversed again and those
      accesses which should be replaced by a scalar are identified.

   4. The function is traversed again, and for every reference into an
      aggregate that has some component which is about to be scalarized,
      statements are amended and new statements are created as necessary.
      Finally, if a parameter got scalarized, the scalar replacements are
      initialized with values from the respective parameter aggregates.  */
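
/* For illustration only (not part of the original sources): given roughly

     struct pair { int x; int y; };
     int f (void)
     {
       struct pair p;
       p.x = 1;
       p.y = 2;
       return p.x + p.y;
     }

   SRA would typically create scalar replacements such as p$x and p$y for the
   two fields, rewrite the member accesses to use them, and thereby allow the
   later scalar optimizers to fold the function down to "return 3;".  */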

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "alloc-pool.h"
#include "tm.h"
#include "tree.h"
#include "gimple.h"
#include "cgraph.h"
#include "tree-flow.h"
#include "ipa-prop.h"
#include "tree-pretty-print.h"
#include "statistics.h"
#include "tree-dump.h"
#include "timevar.h"
#include "params.h"
#include "target.h"
#include "flags.h"
#include "dbgcnt.h"
#include "tree-inline.h"
#include "gimple-pretty-print.h"
#include "ipa-inline.h"

/* Enumeration of all aggregate reductions we can do.  */
enum sra_mode { SRA_MODE_EARLY_IPA,   /* early call regularization */
                SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
                SRA_MODE_INTRA };     /* late intraprocedural SRA */

/* Global variable describing which aggregate reduction we are performing at
   the moment.  */
static enum sra_mode sra_mode;

struct assign_link;

/* ACCESS represents each access to an aggregate variable (as a whole or a
   part).  It can also represent a group of accesses that refer to exactly the
   same fragment of an aggregate (i.e. those that have exactly the same offset
   and size).  Such representatives for a single aggregate, once determined,
   are linked in a linked list and have the group fields set.

   Moreover, when doing intraprocedural SRA, a tree is built from those
   representatives (by means of the first_child and next_sibling pointers), in
   which all items in a subtree are "within" the root, i.e. their offset is
   greater than or equal to the offset of the root and offset+size is smaller
   than or equal to offset+size of the root.  Children of an access are sorted
   by offset.

   Note that accesses to parts of vector and complex number types are always
   represented by an access to the whole complex number or vector.  It is the
   duty of the modifying functions to replace them appropriately.  */
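
/* Illustrative example (not from the original sources): for

     struct inner { int a; int b; };
     struct outer { struct inner in; int c; } v;

   accessed as v, v.in, v.in.a and v.c, the group representatives would,
   assuming 32-bit ints and no padding, form the tree

     v        (offset 0,  size 96)
       v.in   (offset 0,  size 64)
         v.in.a (offset 0, size 32)
       v.c    (offset 64, size 32)

   where indentation denotes the first_child relation and accesses on the same
   level are linked through next_sibling.  */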

struct access
{
  /* Values returned by `get_ref_base_and_extent' for each component reference.
     If EXPR isn't a component reference, just set `BASE = EXPR', `OFFSET = 0'
     and `SIZE = TREE_SIZE (TREE_TYPE (expr))'.  */
  HOST_WIDE_INT offset;
  HOST_WIDE_INT size;
  tree base;

  /* Expression.  It is context dependent so do not use it to create new
     expressions to access the original aggregate.  See PR 42154 for a
     testcase.  */
  tree expr;
  /* Type.  */
  tree type;

  /* The statement this access belongs to.  */
  gimple stmt;

  /* Next group representative for this aggregate.  */
  struct access *next_grp;

  /* Pointer to the group representative.  Pointer to itself if the struct is
     the representative.  */
  struct access *group_representative;

  /* If this access has any children (in terms of the definition above), this
     points to the first one.  */
  struct access *first_child;

  /* In intraprocedural SRA, pointer to the next sibling in the access tree as
     described above.  In IPA-SRA this is a pointer to the next access
     belonging to the same group (having the same representative).  */
  struct access *next_sibling;

  /* Pointers to the first and last element in the linked list of assign
     links.  */
  struct assign_link *first_link, *last_link;

  /* Pointer to the next access in the work queue.  */
  struct access *next_queued;

  /* Replacement variable for this access "region."  Never to be accessed
     directly, always only by means of get_access_replacement() and only when
     the grp_to_be_replaced flag is set.  */
  tree replacement_decl;

  /* Is this particular access a write access?  */
  unsigned write : 1;

  /* Is this access an access to a non-addressable field?  */
  unsigned non_addressable : 1;

  /* Is this access currently in the work queue?  */
  unsigned grp_queued : 1;

  /* Does this group contain a write access?  This flag is propagated down the
     access tree.  */
  unsigned grp_write : 1;

  /* Does this group contain a read access?  This flag is propagated down the
     access tree.  */
  unsigned grp_read : 1;

  /* Does this group contain a read access that comes from an assignment
     statement?  This flag is propagated down the access tree.  */
  unsigned grp_assignment_read : 1;

  /* Does this group contain a write access that comes from an assignment
     statement?  This flag is propagated down the access tree.  */
  unsigned grp_assignment_write : 1;

  /* Does this group contain a read access through a scalar type?  This flag is
     not propagated in the access tree in any direction.  */
  unsigned grp_scalar_read : 1;

  /* Does this group contain a write access through a scalar type?  This flag
     is not propagated in the access tree in any direction.  */
  unsigned grp_scalar_write : 1;

  /* Is this access an artificial one created to scalarize some record
     entirely?  */
  unsigned grp_total_scalarization : 1;

  /* Other passes of the analysis use this bit to make function
     analyze_access_subtree create scalar replacements for this group if
     possible.  */
  unsigned grp_hint : 1;

  /* Is the subtree rooted in this access fully covered by scalar
     replacements?  */
  unsigned grp_covered : 1;

  /* If set to true, this access and all below it in an access tree must not be
     scalarized.  */
  unsigned grp_unscalarizable_region : 1;

  /* Whether data have been written to parts of the aggregate covered by this
     access which is not to be scalarized.  This flag is propagated up the
     access tree.  */
  unsigned grp_unscalarized_data : 1;

  /* Does this access and/or group contain a write access through a
     BIT_FIELD_REF?  */
  unsigned grp_partial_lhs : 1;

  /* Set when a scalar replacement should be created for this variable.  We
     make the decision and create the replacement at different places because
     create_tmp_var cannot be called from within FOR_EACH_REFERENCED_VAR.  */
  unsigned grp_to_be_replaced : 1;

  /* Should TREE_NO_WARNING of a replacement be set?  */
  unsigned grp_no_warning : 1;

  /* Is it possible that the group refers to data which might be (directly or
     otherwise) modified?  */
  unsigned grp_maybe_modified : 1;

  /* Set when this is a representative of a pointer to scalar (i.e. by
     reference) parameter which we consider for turning into a plain scalar
     (i.e. a by value parameter).  */
  unsigned grp_scalar_ptr : 1;

  /* Set when we discover that this pointer is not safe to dereference in the
     caller.  */
  unsigned grp_not_necessarilly_dereferenced : 1;
};

typedef struct access *access_p;

DEF_VEC_P (access_p);
DEF_VEC_ALLOC_P (access_p, heap);

/* Alloc pool for allocating access structures.  */
static alloc_pool access_pool;

/* A structure linking lhs and rhs accesses from an aggregate assignment.  They
   are used to propagate subaccesses from rhs to lhs as long as they don't
   conflict with what is already there.  */
struct assign_link
{
  struct access *lacc, *racc;
  struct assign_link *next;
};

/* Alloc pool for allocating assign link structures.  */
static alloc_pool link_pool;

/* Base (tree) -> Vector (VEC(access_p,heap) *) map.  */
static struct pointer_map_t *base_access_vec;

/* Bitmap of candidates.  */
static bitmap candidate_bitmap;

/* Bitmap of candidates which we should try to entirely scalarize away and
   those which cannot be (because they are, and need to be, used as a
   whole).  */
static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;

/* Obstack for creation of fancy names.  */
static struct obstack name_obstack;

/* Head of a linked list of accesses that need to have their subaccesses
   propagated to their assignment counterparts.  */
static struct access *work_queue_head;

/* Number of parameters of the analyzed function when doing early ipa SRA.  */
static int func_param_count;

/* scan_function sets the following to true if it encounters a call to
   __builtin_apply_args.  */
static bool encountered_apply_args;

/* Set by scan_function when it finds a recursive call.  */
static bool encountered_recursive_call;

/* Set by scan_function when it finds a recursive call with fewer actual
   arguments than formal parameters.  */
static bool encountered_unchangable_recursive_call;

/* This is a table in which for each basic block and parameter there is a
   distance (offset + size) in that parameter which is dereferenced and
   accessed in that BB.  */
static HOST_WIDE_INT *bb_dereferences;
/* Bitmap of BBs that can cause the function to "stop" progressing by
   returning, throwing externally, looping infinitely or calling a function
   which might abort etc.  */
static bitmap final_bbs;

/* Representative of no accesses at all.  */
static struct access no_accesses_representant;

/* Predicate to test the special value.  */

static inline bool
no_accesses_p (struct access *access)
{
  return access == &no_accesses_representant;
}

/* Statistics gathered during the SRA passes.  */

static struct
{
  /* Number of processed aggregates is readily available in
     analyze_all_variable_accesses and so is not stored here.  */

  /* Number of created scalar replacements.  */
  int replacements;

  /* Number of times sra_modify_expr or sra_modify_assign themselves changed an
     expression.  */
  int exprs;

  /* Number of statements created by generate_subtree_copies.  */
  int subtree_copies;

  /* Number of statements created by load_assign_lhs_subreplacements.  */
  int subreplacements;

  /* Number of times sra_modify_assign has deleted a statement.  */
  int deleted;

  /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
     RHS separately due to type conversions or nonexistent matching
     references.  */
  int separate_lhs_rhs_handling;

  /* Number of parameters that were removed because they were unused.  */
  int deleted_unused_parameters;

  /* Number of scalars passed as parameters by reference that have been
     converted to be passed by value.  */
  int scalar_by_ref_to_by_val;

  /* Number of aggregate parameters that were replaced by one or more of their
     components.  */
  int aggregate_params_reduced;

  /* Number of components created when splitting aggregate parameters.  */
  int param_reductions_created;
} sra_stats;

/* Dump contents of ACCESS to file F in a human friendly way.  If GRP is true,
   representative fields are dumped, otherwise those which only describe the
   individual access are.  */

static void
dump_access (FILE *f, struct access *access, bool grp)
{
  fprintf (f, "access { ");
  fprintf (f, "base = (%d)'", DECL_UID (access->base));
  print_generic_expr (f, access->base, 0);
  fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
  fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
  fprintf (f, ", expr = ");
  print_generic_expr (f, access->expr, 0);
  fprintf (f, ", type = ");
  print_generic_expr (f, access->type, 0);
  if (grp)
    fprintf (f, ", grp_read = %d, grp_write = %d, grp_assignment_read = %d, "
             "grp_assignment_write = %d, grp_scalar_read = %d, "
             "grp_scalar_write = %d, grp_total_scalarization = %d, "
             "grp_hint = %d, grp_covered = %d, "
             "grp_unscalarizable_region = %d, grp_unscalarized_data = %d, "
             "grp_partial_lhs = %d, grp_to_be_replaced = %d, "
             "grp_maybe_modified = %d, "
             "grp_not_necessarilly_dereferenced = %d\n",
             access->grp_read, access->grp_write, access->grp_assignment_read,
             access->grp_assignment_write, access->grp_scalar_read,
             access->grp_scalar_write, access->grp_total_scalarization,
             access->grp_hint, access->grp_covered,
             access->grp_unscalarizable_region, access->grp_unscalarized_data,
             access->grp_partial_lhs, access->grp_to_be_replaced,
             access->grp_maybe_modified,
             access->grp_not_necessarilly_dereferenced);
  else
    fprintf (f, ", write = %d, grp_total_scalarization = %d, "
             "grp_partial_lhs = %d\n",
             access->write, access->grp_total_scalarization,
             access->grp_partial_lhs);
}

/* Dump a subtree rooted in ACCESS to file F, indent by LEVEL.  */

static void
dump_access_tree_1 (FILE *f, struct access *access, int level)
{
  do
    {
      int i;

      for (i = 0; i < level; i++)
        fputs ("* ", f);

      dump_access (f, access, true);

      if (access->first_child)
        dump_access_tree_1 (f, access->first_child, level + 1);

      access = access->next_sibling;
    }
  while (access);
}

/* Dump all access trees for a variable, given the pointer to the first root in
   ACCESS.  */

static void
dump_access_tree (FILE *f, struct access *access)
{
  for (; access; access = access->next_grp)
    dump_access_tree_1 (f, access, 0);
}

/* Return true iff ACC is non-NULL and has subaccesses.  */

static inline bool
access_has_children_p (struct access *acc)
{
  return acc && acc->first_child;
}

/* Return true iff ACC is (partly) covered by at least one replacement.  */

static bool
access_has_replacements_p (struct access *acc)
{
  struct access *child;
  if (acc->grp_to_be_replaced)
    return true;
  for (child = acc->first_child; child; child = child->next_sibling)
    if (access_has_replacements_p (child))
      return true;
  return false;
}

/* Return a vector of pointers to accesses for the variable given in BASE or
   NULL if there is none.  */

static VEC (access_p, heap) *
get_base_access_vector (tree base)
{
  void **slot;

  slot = pointer_map_contains (base_access_vec, base);
  if (!slot)
    return NULL;
  else
    return *(VEC (access_p, heap) **) slot;
}

/* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
   in ACCESS.  Return NULL if it cannot be found.  */

static struct access *
find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
                        HOST_WIDE_INT size)
{
  while (access && (access->offset != offset || access->size != size))
    {
      struct access *child = access->first_child;

      while (child && (child->offset + child->size <= offset))
        child = child->next_sibling;
      access = child;
    }

  return access;
}
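
/* For instance (illustrative only): in the example access tree sketched above
   struct access, calling find_access_in_subtree on the root with OFFSET 0 and
   SIZE 32 descends through v.in to v.in.a, whereas OFFSET 8 and SIZE 16 yields
   NULL because no representative matches that combination exactly.  */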

/* Return the first group representative for BASE or NULL if none exists.  */

static struct access *
get_first_repr_for_decl (tree base)
{
  VEC (access_p, heap) *access_vec;

  access_vec = get_base_access_vector (base);
  if (!access_vec)
    return NULL;

  return VEC_index (access_p, access_vec, 0);
}

/* Find an access representative for the variable BASE and given OFFSET and
   SIZE.  Requires that access trees have already been built.  Return NULL if
   it cannot be found.  */

static struct access *
get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
                                 HOST_WIDE_INT size)
{
  struct access *access;

  access = get_first_repr_for_decl (base);
  while (access && (access->offset + access->size <= offset))
    access = access->next_grp;
  if (!access)
    return NULL;

  return find_access_in_subtree (access, offset, size);
}

/* Add LINK to the linked list of assign links of RACC.  */
static void
add_link_to_rhs (struct access *racc, struct assign_link *link)
{
  gcc_assert (link->racc == racc);

  if (!racc->first_link)
    {
      gcc_assert (!racc->last_link);
      racc->first_link = link;
    }
  else
    racc->last_link->next = link;

  racc->last_link = link;
  link->next = NULL;
}

/* Move all link structures in their linked list in OLD_RACC to the linked list
   in NEW_RACC.  */
static void
relink_to_new_repr (struct access *new_racc, struct access *old_racc)
{
  if (!old_racc->first_link)
    {
      gcc_assert (!old_racc->last_link);
      return;
    }

  if (new_racc->first_link)
    {
      gcc_assert (!new_racc->last_link->next);
      gcc_assert (!old_racc->last_link || !old_racc->last_link->next);

      new_racc->last_link->next = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  else
    {
      gcc_assert (!new_racc->last_link);

      new_racc->first_link = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  old_racc->first_link = old_racc->last_link = NULL;
}

/* Add ACCESS to the work queue (which is actually a stack).  */

static void
add_access_to_work_queue (struct access *access)
{
  if (!access->grp_queued)
    {
      gcc_assert (!access->next_queued);
      access->next_queued = work_queue_head;
      access->grp_queued = 1;
      work_queue_head = access;
    }
}

/* Pop an access from the work queue, and return it, assuming there is one.  */

static struct access *
pop_access_from_work_queue (void)
{
  struct access *access = work_queue_head;

  work_queue_head = access->next_queued;
  access->next_queued = NULL;
  access->grp_queued = 0;
  return access;
}


/* Allocate necessary structures.  */

static void
sra_initialize (void)
{
  candidate_bitmap = BITMAP_ALLOC (NULL);
  should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  gcc_obstack_init (&name_obstack);
  access_pool = create_alloc_pool ("SRA accesses", sizeof (struct access), 16);
  link_pool = create_alloc_pool ("SRA links", sizeof (struct assign_link), 16);
  base_access_vec = pointer_map_create ();
  memset (&sra_stats, 0, sizeof (sra_stats));
  encountered_apply_args = false;
  encountered_recursive_call = false;
  encountered_unchangable_recursive_call = false;
}

/* Hook fed to pointer_map_traverse, deallocate stored vectors.  */

static bool
delete_base_accesses (const void *key ATTRIBUTE_UNUSED, void **value,
                      void *data ATTRIBUTE_UNUSED)
{
  VEC (access_p, heap) *access_vec;
  access_vec = (VEC (access_p, heap) *) *value;
  VEC_free (access_p, heap, access_vec);

  return true;
}

/* Deallocate all general structures.  */

static void
sra_deinitialize (void)
{
  BITMAP_FREE (candidate_bitmap);
  BITMAP_FREE (should_scalarize_away_bitmap);
  BITMAP_FREE (cannot_scalarize_away_bitmap);
  free_alloc_pool (access_pool);
  free_alloc_pool (link_pool);
  obstack_free (&name_obstack, NULL);

  pointer_map_traverse (base_access_vec, delete_base_accesses, NULL);
  pointer_map_destroy (base_access_vec);
}

/* Remove DECL from candidates for SRA and write REASON to the dump file if
   there is one.  */
static void
disqualify_candidate (tree decl, const char *reason)
{
  bitmap_clear_bit (candidate_bitmap, DECL_UID (decl));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "! Disqualifying ");
      print_generic_expr (dump_file, decl, 0);
      fprintf (dump_file, " - %s\n", reason);
    }
}

/* Return true iff the type contains a field or an element which does not allow
   scalarization.  */

static bool
type_internals_preclude_sra_p (tree type, const char **msg)
{
  tree fld;
  tree et;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
        if (TREE_CODE (fld) == FIELD_DECL)
          {
            tree ft = TREE_TYPE (fld);

            if (TREE_THIS_VOLATILE (fld))
              {
                *msg = "volatile structure field";
                return true;
              }
            if (!DECL_FIELD_OFFSET (fld))
              {
                *msg = "no structure field offset";
                return true;
              }
            if (!DECL_SIZE (fld))
              {
                *msg = "zero structure field size";
                return true;
              }
            if (!host_integerp (DECL_FIELD_OFFSET (fld), 1))
              {
                *msg = "structure field offset not fixed";
                return true;
              }
            if (!host_integerp (DECL_SIZE (fld), 1))
              {
                *msg = "structure field size not fixed";
                return true;
              }
            if (AGGREGATE_TYPE_P (ft)
                && int_bit_position (fld) % BITS_PER_UNIT != 0)
              {
                *msg = "structure field is bit field";
                return true;
              }

            if (AGGREGATE_TYPE_P (ft) && type_internals_preclude_sra_p (ft, msg))
              return true;
          }

      return false;

    case ARRAY_TYPE:
      et = TREE_TYPE (type);

      if (TYPE_VOLATILE (et))
        {
          *msg = "element type is volatile";
          return true;
        }

      if (AGGREGATE_TYPE_P (et) && type_internals_preclude_sra_p (et, msg))
        return true;

      return false;

    default:
      return false;
    }
}

/* If T is an SSA_NAME, return NULL if it is not a default def or return its
   base variable if it is.  Return T if it is not an SSA_NAME.  */

static tree
get_ssa_base_param (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (t))
        return SSA_NAME_VAR (t);
      else
        return NULL_TREE;
    }
  return t;
}

/* Mark a dereference of BASE of distance DIST in a basic block that STMT
   belongs to, unless the BB has already been marked as potentially final.  */

static void
mark_parm_dereference (tree base, HOST_WIDE_INT dist, gimple stmt)
{
  basic_block bb = gimple_bb (stmt);
  int idx, parm_index = 0;
  tree parm;

  if (bitmap_bit_p (final_bbs, bb->index))
    return;

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm && parm != base;
       parm = DECL_CHAIN (parm))
    parm_index++;

  gcc_assert (parm_index < func_param_count);

  idx = bb->index * func_param_count + parm_index;
  if (bb_dereferences[idx] < dist)
    bb_dereferences[idx] = dist;
}

/* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
   the three fields.  Also add it to the vector of accesses corresponding to
   the base.  Finally, return the new access.  */

static struct access *
create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
{
  VEC (access_p, heap) *vec;
  struct access *access;
  void **slot;

  access = (struct access *) pool_alloc (access_pool);
  memset (access, 0, sizeof (struct access));
  access->base = base;
  access->offset = offset;
  access->size = size;

  slot = pointer_map_contains (base_access_vec, base);
  if (slot)
    vec = (VEC (access_p, heap) *) *slot;
  else
    vec = VEC_alloc (access_p, heap, 32);

  VEC_safe_push (access_p, heap, vec, access);

  *((struct VEC (access_p,heap) **)
        pointer_map_insert (base_access_vec, base)) = vec;

  return access;
}

/* Create and insert access for EXPR.  Return the created access, or NULL if it
   is not possible.  */

static struct access *
create_access (tree expr, gimple stmt, bool write)
{
  struct access *access;
  HOST_WIDE_INT offset, size, max_size;
  tree base = expr;
  bool ptr, unscalarizable_region = false;

  base = get_ref_base_and_extent (expr, &offset, &size, &max_size);

  if (sra_mode == SRA_MODE_EARLY_IPA
      && TREE_CODE (base) == MEM_REF)
    {
      base = get_ssa_base_param (TREE_OPERAND (base, 0));
      if (!base)
        return NULL;
      ptr = true;
    }
  else
    ptr = false;

  if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
    return NULL;

  if (sra_mode == SRA_MODE_EARLY_IPA)
    {
      if (size < 0 || size != max_size)
        {
          disqualify_candidate (base, "Encountered a variable sized access.");
          return NULL;
        }
      if (TREE_CODE (expr) == COMPONENT_REF
          && DECL_BIT_FIELD (TREE_OPERAND (expr, 1)))
        {
          disqualify_candidate (base, "Encountered a bit-field access.");
          return NULL;
        }
      gcc_checking_assert ((offset % BITS_PER_UNIT) == 0);

      if (ptr)
        mark_parm_dereference (base, offset + size, stmt);
    }
  else
    {
      if (size != max_size)
        {
          size = max_size;
          unscalarizable_region = true;
        }
      if (size < 0)
        {
          disqualify_candidate (base, "Encountered an unconstrained access.");
          return NULL;
        }
    }

  access = create_access_1 (base, offset, size);
  access->expr = expr;
  access->type = TREE_TYPE (expr);
  access->write = write;
  access->grp_unscalarizable_region = unscalarizable_region;
  access->stmt = stmt;

  if (TREE_CODE (expr) == COMPONENT_REF
      && DECL_NONADDRESSABLE_P (TREE_OPERAND (expr, 1)))
    access->non_addressable = 1;

  return access;
}


/* Return true iff TYPE is a RECORD_TYPE with fields that are either of gimple
   register types or (recursively) records with only these two kinds of fields.
   It also returns false if any of these records contains a bit-field.  */

static bool
type_consists_of_records_p (tree type)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
    if (TREE_CODE (fld) == FIELD_DECL)
      {
        tree ft = TREE_TYPE (fld);

        if (DECL_BIT_FIELD (fld))
          return false;

        if (!is_gimple_reg_type (ft)
            && !type_consists_of_records_p (ft))
          return false;
      }

  return true;
}
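
/* For example (illustrative only): struct { int i; struct { double d; } in; }
   satisfies type_consists_of_records_p, while struct { int b : 3; } does not
   (bit-field) and struct { int a[2]; } does not either, because an array is
   neither a gimple register type nor a record.  */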

/* Create total_scalarization accesses for all scalar type fields in DECL,
   which must be of a RECORD_TYPE conforming to type_consists_of_records_p.
   BASE must be the top-most VAR_DECL representing the variable, OFFSET must be
   the offset of DECL within BASE.  REF must be the memory reference expression
   for the given decl.  */

static void
completely_scalarize_record (tree base, tree decl, HOST_WIDE_INT offset,
                             tree ref)
{
  tree fld, decl_type = TREE_TYPE (decl);

  for (fld = TYPE_FIELDS (decl_type); fld; fld = DECL_CHAIN (fld))
    if (TREE_CODE (fld) == FIELD_DECL)
      {
        HOST_WIDE_INT pos = offset + int_bit_position (fld);
        tree ft = TREE_TYPE (fld);
        tree nref = build3 (COMPONENT_REF, TREE_TYPE (fld), ref, fld,
                            NULL_TREE);

        if (is_gimple_reg_type (ft))
          {
            struct access *access;
            HOST_WIDE_INT size;

            size = tree_low_cst (DECL_SIZE (fld), 1);
            access = create_access_1 (base, pos, size);
            access->expr = nref;
            access->type = ft;
            access->grp_total_scalarization = 1;
            /* Accesses for intraprocedural SRA can have their stmt NULL.  */
          }
        else
          completely_scalarize_record (base, fld, pos, nref);
      }
}

/* Create total_scalarization accesses for all scalar type fields in VAR and
   for VAR as a whole.  VAR must be of a RECORD_TYPE conforming to
   type_consists_of_records_p.  */

static void
completely_scalarize_var (tree var)
{
  HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (var), 1);
  struct access *access;

  access = create_access_1 (var, 0, size);
  access->expr = var;
  access->type = TREE_TYPE (var);
  access->grp_total_scalarization = 1;

  completely_scalarize_record (var, var, 0, var);
}
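
/* Illustration (not from the original sources): for a variable
   struct pair { int x; int y; } p; completely_scalarize_var creates an access
   for p itself at (offset 0, size 64) plus total_scalarization accesses
   (0, 32) for p.x and (32, 32) for p.y, assuming 32-bit ints and no
   padding.  */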

/* Search the given tree for a declaration by skipping handled components and
   exclude it from the candidates.  */

static void
disqualify_base_of_expr (tree t, const char *reason)
{
  t = get_base_address (t);
  if (sra_mode == SRA_MODE_EARLY_IPA
      && TREE_CODE (t) == MEM_REF)
    t = get_ssa_base_param (TREE_OPERAND (t, 0));

  if (t && DECL_P (t))
    disqualify_candidate (t, reason);
}

/* Scan expression EXPR and create access structures for all accesses to
   candidates for scalarization.  Return the created access or NULL if none is
   created.  */

static struct access *
build_access_from_expr_1 (tree expr, gimple stmt, bool write)
{
  struct access *ret = NULL;
  bool partial_ref;

  if (TREE_CODE (expr) == BIT_FIELD_REF
      || TREE_CODE (expr) == IMAGPART_EXPR
      || TREE_CODE (expr) == REALPART_EXPR)
    {
      expr = TREE_OPERAND (expr, 0);
      partial_ref = true;
    }
  else
    partial_ref = false;

  /* We need to dive through V_C_Es in order to get the size of its parameter
     and not the result type.  Ada produces such statements.  We are also
     capable of handling the topmost V_C_E but not any of those buried in other
     handled components.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
    expr = TREE_OPERAND (expr, 0);

  if (contains_view_convert_expr_p (expr))
    {
      disqualify_base_of_expr (expr, "V_C_E under a different handled "
                               "component.");
      return NULL;
    }

  switch (TREE_CODE (expr))
    {
    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) != ADDR_EXPR
          && sra_mode != SRA_MODE_EARLY_IPA)
        return NULL;
      /* fall through */
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      ret = create_access (expr, stmt, write);
      break;

    default:
      break;
    }

  if (write && partial_ref && ret)
    ret->grp_partial_lhs = 1;

  return ret;
}

/* Scan expression EXPR and create access structures for all accesses to
   candidates for scalarization.  Return true if any access has been inserted.
   STMT must be the statement from which the expression is taken, WRITE must be
   true if the expression is a store and false otherwise.  */

static bool
build_access_from_expr (tree expr, gimple stmt, bool write)
{
  struct access *access;

  access = build_access_from_expr_1 (expr, stmt, write);
  if (access)
    {
      /* This means the aggregate is accessed as a whole in a way other than an
         assign statement and thus cannot be removed even if we had a scalar
         replacement for everything.  */
      if (cannot_scalarize_away_bitmap)
        bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
      return true;
    }
  return false;
}

/* Disqualify LHS and RHS for scalarization if STMT must end its basic block in
   modes in which it matters, return true iff they have been disqualified.  RHS
   may be NULL, in that case ignore it.  If we scalarize an aggregate in
   intra-SRA we may need to add statements after each statement.  This is not
   possible if a statement unconditionally has to end the basic block.  */
static bool
disqualify_ops_if_throwing_stmt (gimple stmt, tree lhs, tree rhs)
{
  if ((sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
      && (stmt_can_throw_internal (stmt) || stmt_ends_bb_p (stmt)))
    {
      disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
      if (rhs)
        disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
      return true;
    }
  return false;
}

/* Return true if EXP is a memory reference less aligned than ALIGN.  This is
   invoked only on strict-alignment targets.  */

static bool
tree_non_aligned_mem_p (tree exp, unsigned int align)
{
  unsigned int exp_align;

  if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    exp = TREE_OPERAND (exp, 0);

  if (TREE_CODE (exp) == SSA_NAME || is_gimple_min_invariant (exp))
    return false;

  /* get_object_alignment will fall back to BITS_PER_UNIT if it cannot
     compute an explicit alignment.  Pretend that dereferenced pointers
     are always aligned on strict-alignment targets.  */
  if (TREE_CODE (exp) == MEM_REF || TREE_CODE (exp) == TARGET_MEM_REF)
    exp_align = get_object_or_type_alignment (exp);
  else
    exp_align = get_object_alignment (exp);

  if (exp_align < align)
    return true;

  return false;
}

/* Return true if EXP is a memory reference less aligned than what the access
   ACC would require.  This is invoked only on strict-alignment targets.  */

static bool
tree_non_aligned_mem_for_access_p (tree exp, struct access *acc)
{
  unsigned int acc_align;

  /* The alignment of the access is that of its expression.  However, it may
     have been artificially increased, e.g. by a local alignment promotion,
     so we cap it to the alignment of the type of the base, on the grounds
     that valid sub-accesses cannot be more aligned than that.  */
  acc_align = get_object_alignment (acc->expr);
  if (acc->base && acc_align > TYPE_ALIGN (TREE_TYPE (acc->base)))
    acc_align = TYPE_ALIGN (TREE_TYPE (acc->base));

  return tree_non_aligned_mem_p (exp, acc_align);
}

/* Scan expressions occurring in STMT, create access structures for all
   accesses to candidates for scalarization and remove those candidates which
   occur in statements or expressions that prevent them from being split apart.
   Return true if any access has been inserted.  */

static bool
build_accesses_from_assign (gimple stmt)
{
  tree lhs, rhs;
  struct access *lacc, *racc;

  if (!gimple_assign_single_p (stmt)
      /* Scope clobbers don't influence scalarization.  */
      || gimple_clobber_p (stmt))
    return false;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);

  if (disqualify_ops_if_throwing_stmt (stmt, lhs, rhs))
    return false;

  racc = build_access_from_expr_1 (rhs, stmt, false);
  lacc = build_access_from_expr_1 (lhs, stmt, true);

  if (lacc)
    {
      lacc->grp_assignment_write = 1;
      if (STRICT_ALIGNMENT && tree_non_aligned_mem_for_access_p (rhs, lacc))
        lacc->grp_unscalarizable_region = 1;
    }

  if (racc)
    {
      racc->grp_assignment_read = 1;
      if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
          && !is_gimple_reg_type (racc->type))
        bitmap_set_bit (should_scalarize_away_bitmap, DECL_UID (racc->base));
      if (STRICT_ALIGNMENT && tree_non_aligned_mem_for_access_p (lhs, racc))
        racc->grp_unscalarizable_region = 1;
    }

  if (lacc && racc
      && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
      && !lacc->grp_unscalarizable_region
      && !racc->grp_unscalarizable_region
      && AGGREGATE_TYPE_P (TREE_TYPE (lhs))
      /* FIXME: Turn the following line into an assert after PR 40058 is
         fixed.  */
      && lacc->size == racc->size
      && useless_type_conversion_p (lacc->type, racc->type))
    {
      struct assign_link *link;

      link = (struct assign_link *) pool_alloc (link_pool);
      memset (link, 0, sizeof (struct assign_link));

      link->lacc = lacc;
      link->racc = racc;

      add_link_to_rhs (racc, link);
    }

  return lacc || racc;
}

/* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
   GIMPLE_ASM operands with memory constraints which cannot be scalarized.  */

static bool
asm_visit_addr (gimple stmt ATTRIBUTE_UNUSED, tree op,
                void *data ATTRIBUTE_UNUSED)
{
  op = get_base_address (op);
  if (op
      && DECL_P (op))
    disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");

  return false;
}

/* Return true iff callsite CALL has at least as many actual arguments as there
   are formal parameters of the function currently processed by IPA-SRA.  */

static inline bool
callsite_has_enough_arguments_p (gimple call)
{
  return gimple_call_num_args (call) >= (unsigned) func_param_count;
}

/* Scan function and look for interesting expressions and create access
   structures for them.  Return true iff any access is created.  */

static bool
scan_function (void)
{
  basic_block bb;
  bool ret = false;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);
          tree t;
          unsigned i;

          if (final_bbs && stmt_can_throw_external (stmt))
            bitmap_set_bit (final_bbs, bb->index);
          switch (gimple_code (stmt))
            {
            case GIMPLE_RETURN:
              t = gimple_return_retval (stmt);
              if (t != NULL_TREE)
                ret |= build_access_from_expr (t, stmt, false);
              if (final_bbs)
                bitmap_set_bit (final_bbs, bb->index);
              break;

            case GIMPLE_ASSIGN:
              ret |= build_accesses_from_assign (stmt);
              break;

            case GIMPLE_CALL:
              for (i = 0; i < gimple_call_num_args (stmt); i++)
                ret |= build_access_from_expr (gimple_call_arg (stmt, i),
                                               stmt, false);

              if (sra_mode == SRA_MODE_EARLY_IPA)
                {
                  tree dest = gimple_call_fndecl (stmt);
                  int flags = gimple_call_flags (stmt);

                  if (dest)
                    {
                      if (DECL_BUILT_IN_CLASS (dest) == BUILT_IN_NORMAL
                          && DECL_FUNCTION_CODE (dest) == BUILT_IN_APPLY_ARGS)
                        encountered_apply_args = true;
                      if (cgraph_get_node (dest)
                          == cgraph_get_node (current_function_decl))
                        {
                          encountered_recursive_call = true;
                          if (!callsite_has_enough_arguments_p (stmt))
                            encountered_unchangable_recursive_call = true;
                        }
                    }

                  if (final_bbs
                      && (flags & (ECF_CONST | ECF_PURE)) == 0)
                    bitmap_set_bit (final_bbs, bb->index);
                }

              t = gimple_call_lhs (stmt);
              if (t && !disqualify_ops_if_throwing_stmt (stmt, t, NULL))
                ret |= build_access_from_expr (t, stmt, true);
              break;

            case GIMPLE_ASM:
              walk_stmt_load_store_addr_ops (stmt, NULL, NULL, NULL,
                                             asm_visit_addr);
              if (final_bbs)
                bitmap_set_bit (final_bbs, bb->index);

              for (i = 0; i < gimple_asm_ninputs (stmt); i++)
                {
                  t = TREE_VALUE (gimple_asm_input_op (stmt, i));
                  ret |= build_access_from_expr (t, stmt, false);
                }
              for (i = 0; i < gimple_asm_noutputs (stmt); i++)
                {
                  t = TREE_VALUE (gimple_asm_output_op (stmt, i));
                  ret |= build_access_from_expr (t, stmt, true);
                }
              break;

            default:
              break;
            }
        }
    }

  return ret;
}

/* Helper of QSORT function.  There are pointers to accesses in the array.  An
   access is considered smaller than another if it has a smaller offset or if
   the offsets are the same but its size is bigger.  */

static int
compare_access_positions (const void *a, const void *b)
{
  const access_p *fp1 = (const access_p *) a;
  const access_p *fp2 = (const access_p *) b;
  const access_p f1 = *fp1;
  const access_p f2 = *fp2;

  if (f1->offset != f2->offset)
    return f1->offset < f2->offset ? -1 : 1;

  if (f1->size == f2->size)
    {
      if (f1->type == f2->type)
        return 0;
      /* Put any non-aggregate type before any aggregate type.  */
      else if (!is_gimple_reg_type (f1->type)
               && is_gimple_reg_type (f2->type))
        return 1;
      else if (is_gimple_reg_type (f1->type)
               && !is_gimple_reg_type (f2->type))
        return -1;
      /* Put any complex or vector type before any other scalar type.  */
      else if (TREE_CODE (f1->type) != COMPLEX_TYPE
               && TREE_CODE (f1->type) != VECTOR_TYPE
               && (TREE_CODE (f2->type) == COMPLEX_TYPE
                   || TREE_CODE (f2->type) == VECTOR_TYPE))
        return 1;
      else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
                || TREE_CODE (f1->type) == VECTOR_TYPE)
               && TREE_CODE (f2->type) != COMPLEX_TYPE
               && TREE_CODE (f2->type) != VECTOR_TYPE)
        return -1;
      /* Put the integral type with the bigger precision first.  */
      else if (INTEGRAL_TYPE_P (f1->type)
               && INTEGRAL_TYPE_P (f2->type))
        return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
      /* Put any integral type with non-full precision last.  */
      else if (INTEGRAL_TYPE_P (f1->type)
               && (TREE_INT_CST_LOW (TYPE_SIZE (f1->type))
                   != TYPE_PRECISION (f1->type)))
        return 1;
      else if (INTEGRAL_TYPE_P (f2->type)
               && (TREE_INT_CST_LOW (TYPE_SIZE (f2->type))
                   != TYPE_PRECISION (f2->type)))
        return -1;
      /* Stabilize the sort.  */
      return TYPE_UID (f1->type) - TYPE_UID (f2->type);
    }

  /* We want the bigger accesses first, thus the opposite operator in the next
     line: */
  return f1->size > f2->size ? -1 : 1;
}
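
/* For example (illustrative only): accesses at (offset 0, size 64), (0, 32)
   and (32, 32) sort as (0, 64), (0, 32), (32, 32), i.e. by ascending offset
   and, at equal offsets, by descending size, so that an enclosing access
   always precedes the subaccesses it covers.  */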


/* Append a name of the declaration to the name obstack.  A helper function for
   make_fancy_name.  */

static void
make_fancy_decl_name (tree decl)
{
  char buffer[32];

  tree name = DECL_NAME (decl);
  if (name)
    obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
                  IDENTIFIER_LENGTH (name));
  else
    {
      sprintf (buffer, "D%u", DECL_UID (decl));
      obstack_grow (&name_obstack, buffer, strlen (buffer));
    }
}

/* Helper for make_fancy_name.  */

static void
make_fancy_name_1 (tree expr)
{
  char buffer[32];
  tree index;

  if (DECL_P (expr))
    {
      make_fancy_decl_name (expr);
      return;
    }

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      obstack_1grow (&name_obstack, '$');
      make_fancy_decl_name (TREE_OPERAND (expr, 1));
      break;

    case ARRAY_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      obstack_1grow (&name_obstack, '$');
      /* Arrays with only one element may not have a constant as their
         index.  */
      index = TREE_OPERAND (expr, 1);
      if (TREE_CODE (index) != INTEGER_CST)
        break;
      sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
      obstack_grow (&name_obstack, buffer, strlen (buffer));
      break;

    case ADDR_EXPR:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      break;

    case MEM_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      if (!integer_zerop (TREE_OPERAND (expr, 1)))
        {
          obstack_1grow (&name_obstack, '$');
          sprintf (buffer, HOST_WIDE_INT_PRINT_DEC,
                   TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)));
          obstack_grow (&name_obstack, buffer, strlen (buffer));
        }
      break;

    case BIT_FIELD_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      gcc_unreachable ();       /* we treat these as scalars.  */
      break;
    default:
      break;
    }
}

/* Create a human-readable name for the replacement variable built from
   expression EXPR.  */

static char *
make_fancy_name (tree expr)
{
  make_fancy_name_1 (expr);
  obstack_1grow (&name_obstack, '\0');
  return XOBFINISH (&name_obstack, char *);
}
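
/* For instance (illustrative only): for a variable v of a record type, the
   expression v.in.arr[3] would get the fancy name "v$in$arr$3", since each
   COMPONENT_REF contributes '$' plus the field name and each ARRAY_REF
   contributes '$' plus the constant index.  */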

/* Construct a MEM_REF that would reference a part of aggregate BASE of type
   EXP_TYPE at the given OFFSET.  If BASE is something for which
   get_addr_base_and_unit_offset returns NULL, GSI must be non-NULL and is used
   to insert new statements either before or below the current one as specified
   by INSERT_AFTER.  This function is not capable of handling bitfields.  */

tree
build_ref_for_offset (location_t loc, tree base, HOST_WIDE_INT offset,
                      tree exp_type, gimple_stmt_iterator *gsi,
                      bool insert_after)
{
  tree prev_base = base;
  tree off;
  HOST_WIDE_INT base_offset;
  unsigned HOST_WIDE_INT misalign;
  unsigned int align;

  gcc_checking_assert (offset % BITS_PER_UNIT == 0);

  base = get_addr_base_and_unit_offset (base, &base_offset);

  /* get_addr_base_and_unit_offset returns NULL for references with a variable
     offset such as array[var_index].  */
  if (!base)
    {
      gimple stmt;
      tree tmp, addr;

      gcc_checking_assert (gsi);
      tmp = create_tmp_reg (build_pointer_type (TREE_TYPE (prev_base)), NULL);
      add_referenced_var (tmp);
      tmp = make_ssa_name (tmp, NULL);
      addr = build_fold_addr_expr (unshare_expr (prev_base));
      STRIP_USELESS_TYPE_CONVERSION (addr);
      stmt = gimple_build_assign (tmp, addr);
      gimple_set_location (stmt, loc);
      SSA_NAME_DEF_STMT (tmp) = stmt;
      if (insert_after)
        gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
      else
        gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
      update_stmt (stmt);

      off = build_int_cst (reference_alias_ptr_type (prev_base),
                           offset / BITS_PER_UNIT);
      base = tmp;
    }
  else if (TREE_CODE (base) == MEM_REF)
    {
      off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
                           base_offset + offset / BITS_PER_UNIT);
      off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
      base = unshare_expr (TREE_OPERAND (base, 0));
    }
  else
    {
      off = build_int_cst (reference_alias_ptr_type (base),
                           base_offset + offset / BITS_PER_UNIT);
      base = build_fold_addr_expr (unshare_expr (base));
    }

  /* If prev_base were always an originally performed access
     we can extract more optimistic alignment information
     by looking at the access mode.  That would constrain the
     alignment of base + base_offset which we would need to
     adjust according to offset.  */
  align = get_pointer_alignment_1 (base, &misalign);
  if (misalign == 0
      && (TREE_CODE (prev_base) == MEM_REF
          || TREE_CODE (prev_base) == TARGET_MEM_REF))
    align = MAX (align, TYPE_ALIGN (TREE_TYPE (prev_base)));
  misalign += (double_int_sext (tree_to_double_int (off),
                                TYPE_PRECISION (TREE_TYPE (off))).low
               * BITS_PER_UNIT);
  misalign = misalign & (align - 1);
  if (misalign != 0)
    align = (misalign & -misalign);
  if (align < TYPE_ALIGN (exp_type))
    exp_type = build_aligned_type (exp_type, align);

  return fold_build2_loc (loc, MEM_REF, exp_type, base, off);
}
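
/* As a rough illustration (not from the original sources): for a local
   variable s of a structure type and OFFSET 32, the function conceptually
   produces MEM[(EXP_TYPE *)&s + 4B], i.e. a MEM_REF of EXP_TYPE based on the
   address of s with a 4-byte offset.  */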
1546
 
1547
DEF_VEC_ALLOC_P_STACK (tree);
1548
#define VEC_tree_stack_alloc(alloc) VEC_stack_alloc (tree, alloc)
1549
 
1550
/* Construct a memory reference to a part of an aggregate BASE at the given
1551
   OFFSET and of the type of MODEL.  In case this is a chain of references
1552
   to a component, the function will replicate the chain of COMPONENT_REFs of
1553
   the expression of MODEL to access it.  GSI and INSERT_AFTER have the same
1554
   meaning as in build_ref_for_offset.  */
1555
 
1556
static tree
1557
build_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
1558
                     struct access *model, gimple_stmt_iterator *gsi,
1559
                     bool insert_after)
1560
{
1561
  tree type = model->type, t;
1562
  VEC(tree,stack) *cr_stack = NULL;
1563
 
1564
  if (TREE_CODE (model->expr) == COMPONENT_REF)
1565
    {
1566
      tree expr = model->expr;
1567
 
1568
      /* Create a stack of the COMPONENT_REFs so later we can walk them in
1569
         order from inner to outer.  */
1570
      cr_stack = VEC_alloc (tree, stack, 6);
1571
 
1572
      do {
1573
        tree field = TREE_OPERAND (expr, 1);
1574
        tree cr_offset = component_ref_field_offset (expr);
1575
        HOST_WIDE_INT bit_pos
1576
          = tree_low_cst (cr_offset, 1) * BITS_PER_UNIT
1577
              + TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
1578
 
1579
        /* We can be called with a model different from the one associated
1580
           with BASE so we need to avoid going up the chain too far.  */
1581
        if (offset - bit_pos < 0)
1582
          break;
1583
 
1584
        offset -= bit_pos;
1585
        VEC_safe_push (tree, stack, cr_stack, expr);
1586
 
1587
        expr = TREE_OPERAND (expr, 0);
1588
        type = TREE_TYPE (expr);
1589
      } while (TREE_CODE (expr) == COMPONENT_REF);
1590
    }
1591
 
1592
  t = build_ref_for_offset (loc, base, offset, type, gsi, insert_after);
1593
 
1594
  if (TREE_CODE (model->expr) == COMPONENT_REF)
1595
    {
1596
      unsigned i;
1597
      tree expr;
1598
 
1599
      /* Now replicate the chain of COMPONENT_REFs from inner to outer.  */
1600
      FOR_EACH_VEC_ELT_REVERSE (tree, cr_stack, i, expr)
1601
        {
1602
          tree field = TREE_OPERAND (expr, 1);
1603
          t = fold_build3_loc (loc, COMPONENT_REF, TREE_TYPE (field), t, field,
1604
                               TREE_OPERAND (expr, 2));
1605
        }
1606
 
1607
      VEC_free (tree, stack, cr_stack);
1608
    }
1609
 
1610
  return t;
1611
}
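 
/* Editorial sketch, not part of the original file: if MODEL was created
   for the COMPONENT_REF chain  m.in.x  and BASE is a different aggregate
   b  with the same layout, the code above rebuilds the chain on top of
   BASE, producing  b.in.x :

     struct In { int x; int y; };
     struct Out { struct In in; } m, b;   // model expr m.in.x -> built ref b.in.x

   In, Out, m and b are illustrative names only.  */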
1612
 
1613
/* Construct a memory reference consisting of component_refs and array_refs to
1614
   a part of an aggregate *RES (which is of type TYPE).  The requested part
1615
   should have type EXP_TYPE and be at the given OFFSET.  This function might not
1616
   succeed; it returns true when it does, and only then does *RES point to something
1617
   meaningful.  This function should be used only to build expressions that we
1618
   might need to present to the user (e.g. in warnings).  In all other situations,
1619
   build_ref_for_model or build_ref_for_offset should be used instead.  */
1620
 
1621
static bool
1622
build_user_friendly_ref_for_offset (tree *res, tree type, HOST_WIDE_INT offset,
1623
                                    tree exp_type)
1624
{
1625
  while (1)
1626
    {
1627
      tree fld;
1628
      tree tr_size, index, minidx;
1629
      HOST_WIDE_INT el_size;
1630
 
1631
      if (offset == 0 && exp_type
1632
          && types_compatible_p (exp_type, type))
1633
        return true;
1634
 
1635
      switch (TREE_CODE (type))
1636
        {
1637
        case UNION_TYPE:
1638
        case QUAL_UNION_TYPE:
1639
        case RECORD_TYPE:
1640
          for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1641
            {
1642
              HOST_WIDE_INT pos, size;
1643
              tree expr, *expr_ptr;
1644
 
1645
              if (TREE_CODE (fld) != FIELD_DECL)
1646
                continue;
1647
 
1648
              pos = int_bit_position (fld);
1649
              gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
1650
              tr_size = DECL_SIZE (fld);
1651
              if (!tr_size || !host_integerp (tr_size, 1))
1652
                continue;
1653
              size = tree_low_cst (tr_size, 1);
1654
              if (size == 0)
1655
                {
1656
                  if (pos != offset)
1657
                    continue;
1658
                }
1659
              else if (pos > offset || (pos + size) <= offset)
1660
                continue;
1661
 
1662
              expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
1663
                             NULL_TREE);
1664
              expr_ptr = &expr;
1665
              if (build_user_friendly_ref_for_offset (expr_ptr, TREE_TYPE (fld),
1666
                                                      offset - pos, exp_type))
1667
                {
1668
                  *res = expr;
1669
                  return true;
1670
                }
1671
            }
1672
          return false;
1673
 
1674
        case ARRAY_TYPE:
1675
          tr_size = TYPE_SIZE (TREE_TYPE (type));
1676
          if (!tr_size || !host_integerp (tr_size, 1))
1677
            return false;
1678
          el_size = tree_low_cst (tr_size, 1);
1679
 
1680
          minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
1681
          if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
1682
            return false;
1683
          index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
1684
          if (!integer_zerop (minidx))
1685
            index = int_const_binop (PLUS_EXPR, index, minidx);
1686
          *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
1687
                         NULL_TREE, NULL_TREE);
1688
          offset = offset % el_size;
1689
          type = TREE_TYPE (type);
1690
          break;
1691
 
1692
        default:
1693
          if (offset != 0)
1694
            return false;
1695
 
1696
          if (exp_type)
1697
            return false;
1698
          else
1699
            return true;
1700
        }
1701
    }
1702
}
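 
/* Editorial sketch, not part of the original file: for a record that
   contains an array, the loop above peels one level per iteration.  With

     struct T { int pad; int arr[4]; } t;

   and 32-bit ints, a request for OFFSET == 96 and EXP_TYPE == int first
   selects the field  arr  (bit position 32) and then the element
   arr[(96 - 32) / 32], i.e.  t.arr[2] , which is the form a user would
   expect to see in a warning.  */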
1703
 
1704
/* Return true iff TYPE is a stdarg va_list type.  */
1705
 
1706
static inline bool
1707
is_va_list_type (tree type)
1708
{
1709
  return TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (va_list_type_node);
1710
}
1711
 
1712
/* Print a message to the dump file stating why a variable was rejected.  */
1713
 
1714
static void
1715
reject (tree var, const char *msg)
1716
{
1717
  if (dump_file && (dump_flags & TDF_DETAILS))
1718
    {
1719
      fprintf (dump_file, "Rejected (%d): %s: ", DECL_UID (var), msg);
1720
      print_generic_expr (dump_file, var, 0);
1721
      fprintf (dump_file, "\n");
1722
    }
1723
}
1724
 
1725
/* The very first phase of intraprocedural SRA.  It marks in candidate_bitmap
1726
   those declarations whose type is suitable for scalarization.  */
1727
 
1728
static bool
1729
find_var_candidates (void)
1730
{
1731
  tree var, type;
1732
  referenced_var_iterator rvi;
1733
  bool ret = false;
1734
  const char *msg;
1735
 
1736
  FOR_EACH_REFERENCED_VAR (cfun, var, rvi)
1737
    {
1738
      if (TREE_CODE (var) != VAR_DECL && TREE_CODE (var) != PARM_DECL)
1739
        continue;
1740
      type = TREE_TYPE (var);
1741
 
1742
      if (!AGGREGATE_TYPE_P (type))
1743
        {
1744
          reject (var, "not aggregate");
1745
          continue;
1746
        }
1747
      if (needs_to_live_in_memory (var))
1748
        {
1749
          reject (var, "needs to live in memory");
1750
          continue;
1751
        }
1752
      if (TREE_THIS_VOLATILE (var))
1753
        {
1754
          reject (var, "is volatile");
1755
          continue;
1756
        }
1757
      if (!COMPLETE_TYPE_P (type))
1758
        {
1759
          reject (var, "has incomplete type");
1760
          continue;
1761
        }
1762
      if (!host_integerp (TYPE_SIZE (type), 1))
1763
        {
1764
          reject (var, "type size not fixed");
1765
          continue;
1766
        }
1767
      if (tree_low_cst (TYPE_SIZE (type), 1) == 0)
1768
        {
1769
          reject (var, "type size is zero");
1770
          continue;
1771
        }
1772
      if (type_internals_preclude_sra_p (type, &msg))
1773
        {
1774
          reject (var, msg);
1775
          continue;
1776
        }
1777
      if (/* Fix for PR 41089.  tree-stdarg.c needs to have va_lists intact but
1778
              we also want to schedule it rather late.  Thus we ignore it in
1779
              the early pass. */
1780
          (sra_mode == SRA_MODE_EARLY_INTRA
1781
              && is_va_list_type (type)))
1782
        {
1783
          reject (var, "is va_list");
1784
          continue;
1785
        }
1786
 
1787
      bitmap_set_bit (candidate_bitmap, DECL_UID (var));
1788
 
1789
      if (dump_file && (dump_flags & TDF_DETAILS))
1790
        {
1791
          fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
1792
          print_generic_expr (dump_file, var, 0);
1793
          fprintf (dump_file, "\n");
1794
        }
1795
      ret = true;
1796
    }
1797
 
1798
  return ret;
1799
}
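 
/* Editorial sketch, not part of the original file: typical outcomes of
   the checks above for hypothetical local declarations:

     void f (int **out)
     {
       struct P { int x; int y; } a;   // candidate: complete aggregate of fixed size
       volatile struct P b;            // rejected: is volatile
       struct P c;
       *out = &c.x;                    // rejected: c's address escapes, so it
     }                                 // needs to live in memory

   f, P, a, b, c and out are illustrative names only.  */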
1800
 
1801
/* Sort all accesses for the given variable, check for partial overlaps and
1802
   return NULL if there are any.  If there are none, pick a representative for
1803
   each combination of offset and size and create a linked list out of them.
1804
   Return the pointer to the first representative and make sure it is the first
1805
   one in the vector of accesses.  */
1806
 
1807
static struct access *
1808
sort_and_splice_var_accesses (tree var)
1809
{
1810
  int i, j, access_count;
1811
  struct access *res, **prev_acc_ptr = &res;
1812
  VEC (access_p, heap) *access_vec;
1813
  bool first = true;
1814
  HOST_WIDE_INT low = -1, high = 0;
1815
 
1816
  access_vec = get_base_access_vector (var);
1817
  if (!access_vec)
1818
    return NULL;
1819
  access_count = VEC_length (access_p, access_vec);
1820
 
1821
  /* Sort by <OFFSET, SIZE>.  */
1822
  VEC_qsort (access_p, access_vec, compare_access_positions);
1823
 
1824
  i = 0;
1825
  while (i < access_count)
1826
    {
1827
      struct access *access = VEC_index (access_p, access_vec, i);
1828
      bool grp_write = access->write;
1829
      bool grp_read = !access->write;
1830
      bool grp_scalar_write = access->write
1831
        && is_gimple_reg_type (access->type);
1832
      bool grp_scalar_read = !access->write
1833
        && is_gimple_reg_type (access->type);
1834
      bool grp_assignment_read = access->grp_assignment_read;
1835
      bool grp_assignment_write = access->grp_assignment_write;
1836
      bool multiple_scalar_reads = false;
1837
      bool total_scalarization = access->grp_total_scalarization;
1838
      bool grp_partial_lhs = access->grp_partial_lhs;
1839
      bool first_scalar = is_gimple_reg_type (access->type);
1840
      bool unscalarizable_region = access->grp_unscalarizable_region;
1841
 
1842
      if (first || access->offset >= high)
1843
        {
1844
          first = false;
1845
          low = access->offset;
1846
          high = access->offset + access->size;
1847
        }
1848
      else if (access->offset > low && access->offset + access->size > high)
1849
        return NULL;
1850
      else
1851
        gcc_assert (access->offset >= low
1852
                    && access->offset + access->size <= high);
1853
 
1854
      j = i + 1;
1855
      while (j < access_count)
1856
        {
1857
          struct access *ac2 = VEC_index (access_p, access_vec, j);
1858
          if (ac2->offset != access->offset || ac2->size != access->size)
1859
            break;
1860
          if (ac2->write)
1861
            {
1862
              grp_write = true;
1863
              grp_scalar_write = (grp_scalar_write
1864
                                  || is_gimple_reg_type (ac2->type));
1865
            }
1866
          else
1867
            {
1868
              grp_read = true;
1869
              if (is_gimple_reg_type (ac2->type))
1870
                {
1871
                  if (grp_scalar_read)
1872
                    multiple_scalar_reads = true;
1873
                  else
1874
                    grp_scalar_read = true;
1875
                }
1876
            }
1877
          grp_assignment_read |= ac2->grp_assignment_read;
1878
          grp_assignment_write |= ac2->grp_assignment_write;
1879
          grp_partial_lhs |= ac2->grp_partial_lhs;
1880
          unscalarizable_region |= ac2->grp_unscalarizable_region;
1881
          total_scalarization |= ac2->grp_total_scalarization;
1882
          relink_to_new_repr (access, ac2);
1883
 
1884
          /* If there are both aggregate-type and scalar-type accesses with
1885
             this combination of size and offset, the comparison function
1886
             should have put the scalars first.  */
1887
          gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
1888
          ac2->group_representative = access;
1889
          j++;
1890
        }
1891
 
1892
      i = j;
1893
 
1894
      access->group_representative = access;
1895
      access->grp_write = grp_write;
1896
      access->grp_read = grp_read;
1897
      access->grp_scalar_read = grp_scalar_read;
1898
      access->grp_scalar_write = grp_scalar_write;
1899
      access->grp_assignment_read = grp_assignment_read;
1900
      access->grp_assignment_write = grp_assignment_write;
1901
      access->grp_hint = multiple_scalar_reads || total_scalarization;
1902
      access->grp_total_scalarization = total_scalarization;
1903
      access->grp_partial_lhs = grp_partial_lhs;
1904
      access->grp_unscalarizable_region = unscalarizable_region;
1905
      if (access->first_link)
1906
        add_access_to_work_queue (access);
1907
 
1908
      *prev_acc_ptr = access;
1909
      prev_acc_ptr = &access->next_grp;
1910
    }
1911
 
1912
  gcc_assert (res == VEC_index (access_p, access_vec, 0));
1913
  return res;
1914
}
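 
/* Editorial sketch, not part of the original file: a disqualifying
   partial overlap can arise through a union, e.g.

     union U { struct { int a; int b; } s; long long l; } u;

   Accesses to  u.s.b  (offset 32, size 32) and  u.l  (offset 0, size 64)
   nest and are acceptable, but an access of size 64 starting at offset
   32 would overlap  u.l  without being contained in it, and the whole of
   u  would be disqualified.  */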
1915
 
1916
/* Create a variable for the given ACCESS which determines the type, name and a
1917
   few other properties.  Return the variable declaration and also store it in
1918
   ACCESS->replacement_decl.  */
1919
 
1920
static tree
1921
create_access_replacement (struct access *access, bool rename)
1922
{
1923
  tree repl;
1924
 
1925
  repl = create_tmp_var (access->type, "SR");
1926
  add_referenced_var (repl);
1927
  if (rename)
1928
    mark_sym_for_renaming (repl);
1929
 
1930
  if (!access->grp_partial_lhs
1931
      && (TREE_CODE (access->type) == COMPLEX_TYPE
1932
          || TREE_CODE (access->type) == VECTOR_TYPE))
1933
    DECL_GIMPLE_REG_P (repl) = 1;
1934
 
1935
  DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
1936
  DECL_ARTIFICIAL (repl) = 1;
1937
  DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);
1938
 
1939
  if (DECL_NAME (access->base)
1940
      && !DECL_IGNORED_P (access->base)
1941
      && !DECL_ARTIFICIAL (access->base))
1942
    {
1943
      char *pretty_name = make_fancy_name (access->expr);
1944
      tree debug_expr = unshare_expr (access->expr), d;
1945
 
1946
      DECL_NAME (repl) = get_identifier (pretty_name);
1947
      obstack_free (&name_obstack, pretty_name);
1948
 
1949
      /* Get rid of any SSA_NAMEs embedded in debug_expr,
1950
         as DECL_DEBUG_EXPR isn't considered when looking for still
1951
         used SSA_NAMEs and thus they could be freed.  All debug info
1952
         generation cares is whether something is constant or variable
1953
         and that get_ref_base_and_extent works properly on the
1954
         expression.  */
1955
      for (d = debug_expr; handled_component_p (d); d = TREE_OPERAND (d, 0))
1956
        switch (TREE_CODE (d))
1957
          {
1958
          case ARRAY_REF:
1959
          case ARRAY_RANGE_REF:
1960
            if (TREE_OPERAND (d, 1)
1961
                && TREE_CODE (TREE_OPERAND (d, 1)) == SSA_NAME)
1962
              TREE_OPERAND (d, 1) = SSA_NAME_VAR (TREE_OPERAND (d, 1));
1963
            if (TREE_OPERAND (d, 3)
1964
                && TREE_CODE (TREE_OPERAND (d, 3)) == SSA_NAME)
1965
              TREE_OPERAND (d, 3) = SSA_NAME_VAR (TREE_OPERAND (d, 3));
1966
            /* FALLTHRU */
1967
          case COMPONENT_REF:
1968
            if (TREE_OPERAND (d, 2)
1969
                && TREE_CODE (TREE_OPERAND (d, 2)) == SSA_NAME)
1970
              TREE_OPERAND (d, 2) = SSA_NAME_VAR (TREE_OPERAND (d, 2));
1971
            break;
1972
          default:
1973
            break;
1974
          }
1975
      SET_DECL_DEBUG_EXPR (repl, debug_expr);
1976
      DECL_DEBUG_EXPR_IS_FROM (repl) = 1;
1977
      if (access->grp_no_warning)
1978
        TREE_NO_WARNING (repl) = 1;
1979
      else
1980
        TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
1981
    }
1982
  else
1983
    TREE_NO_WARNING (repl) = 1;
1984
 
1985
  if (dump_file)
1986
    {
1987
      fprintf (dump_file, "Created a replacement for ");
1988
      print_generic_expr (dump_file, access->base, 0);
1989
      fprintf (dump_file, " offset: %u, size: %u: ",
1990
               (unsigned) access->offset, (unsigned) access->size);
1991
      print_generic_expr (dump_file, repl, 0);
1992
      fprintf (dump_file, "\n");
1993
    }
1994
  sra_stats.replacements++;
1995
 
1996
  return repl;
1997
}
1998
 
1999
/* Return ACCESS scalar replacement, create it if it does not exist yet.  */
2000
 
2001
static inline tree
2002
get_access_replacement (struct access *access)
2003
{
2004
  gcc_assert (access->grp_to_be_replaced);
2005
 
2006
  if (!access->replacement_decl)
2007
    access->replacement_decl = create_access_replacement (access, true);
2008
  return access->replacement_decl;
2009
}
2010
 
2011
/* Return ACCESS scalar replacement, create it if it does not exist yet but do
2012
   not mark it for renaming.  */
2013
 
2014
static inline tree
2015
get_unrenamed_access_replacement (struct access *access)
2016
{
2017
  gcc_assert (!access->grp_to_be_replaced);
2018
 
2019
  if (!access->replacement_decl)
2020
    access->replacement_decl = create_access_replacement (access, false);
2021
  return access->replacement_decl;
2022
}
2023
 
2024
 
2025
/* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
2026
   linked list along the way.  Stop when *ACCESS is NULL or the access pointed
2027
   to by it is not "within" the root.  Return false iff some accesses partially
2028
   overlap.  */
2029
 
2030
static bool
2031
build_access_subtree (struct access **access)
2032
{
2033
  struct access *root = *access, *last_child = NULL;
2034
  HOST_WIDE_INT limit = root->offset + root->size;
2035
 
2036
  *access = (*access)->next_grp;
2037
  while  (*access && (*access)->offset + (*access)->size <= limit)
2038
    {
2039
      if (!last_child)
2040
        root->first_child = *access;
2041
      else
2042
        last_child->next_sibling = *access;
2043
      last_child = *access;
2044
 
2045
      if (!build_access_subtree (access))
2046
        return false;
2047
    }
2048
 
2049
  if (*access && (*access)->offset < limit)
2050
    return false;
2051
 
2052
  return true;
2053
}
2054
 
2055
/* Build a tree of access representatives, ACCESS is the pointer to the first
2056
   one, others are linked in a list by the next_grp field.  Return false iff
2057
   some accesses partially overlap.  */
2058
 
2059
static bool
2060
build_access_trees (struct access *access)
2061
{
2062
  while (access)
2063
    {
2064
      struct access *root = access;
2065
 
2066
      if (!build_access_subtree (&access))
2067
        return false;
2068
      root->next_grp = access;
2069
    }
2070
  return true;
2071
}
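 
/* Editorial sketch, not part of the original file: for

     struct O { struct I { int a; int b; } in; int c; } o;

   with representatives for  o.in ,  o.in.a ,  o.in.b  and  o.c , the
   resulting forest has a root for  o.in  whose children are  o.in.a and
   o.in.b , followed by the sibling root  o.c .  */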
2072
 
2073
/* Return true if EXPR contains some ARRAY_REFs into a variable-bounded
2074
   array.  */
2075
 
2076
static bool
2077
expr_with_var_bounded_array_refs_p (tree expr)
2078
{
2079
  while (handled_component_p (expr))
2080
    {
2081
      if (TREE_CODE (expr) == ARRAY_REF
2082
          && !host_integerp (array_ref_low_bound (expr), 0))
2083
        return true;
2084
      expr = TREE_OPERAND (expr, 0);
2085
    }
2086
  return false;
2087
}
2088
 
2089
/* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
2090
   both seeming beneficial and when ALLOW_REPLACEMENTS allows it.  Also set all
2091
   sorts of access flags appropriately along the way, notably propagating
2092
   grp_read, grp_assignment_read, grp_write, grp_assignment_write and
2093
   grp_total_scalarization from PARENT down to ROOT.
2094
 
2095
   Creating a replacement for a scalar access is considered beneficial if its
2096
   grp_hint is set (this means we are either attempting total scalarization or
2097
   there is more than one direct read access) or according to the following
2098
   table:
2099
 
2100
   Access written to through a scalar type (once or more times)
2101
   |
2102
   |    Written to in an assignment statement
2103
   |    |
2104
   |    |       Access read as scalar _once_
2105
   |    |       |
2106
   |    |       |       Read in an assignment statement
2107
   |    |       |       |
2108
   |    |       |       |       Scalarize       Comment
2109
-----------------------------------------------------------------------------
2110
   0    0       0       0                       No access for the scalar
2111
   0    0       0       1                       No access for the scalar
2112
   0    0       1       0       No              Single read - won't help
2113
   0    0       1       1       No              The same case as above
2114
   0    1       0       0                       No access for the scalar
2115
   0    1       0       1       No              The same case as above
2116
   0    1       1       0       Yes             s = *g; return s.i;
2117
   0    1       1       1       Yes             The same case as above
2118
   1    0       0       0       No              Won't help
2119
   1    0       0       1       Yes             s.i = 1; *g = s;
2120
   1    0       1       0       Yes             s.i = 5; g = s.i;
2121
   1    0       1       1       Yes             The same case as above
2122
   1    1       0       0       No              Won't help.
2123
   1    1       0       1       Yes             s.i = 1; *g = s;
2124
   1    1       1       0       Yes             s = *g; return s.i;
2125
   1    1       1       1       Yes             Any of the above yeses  */
2126
 
2127
static bool
2128
analyze_access_subtree (struct access *root, struct access *parent,
2129
                        bool allow_replacements)
2130
{
2131
  struct access *child;
2132
  HOST_WIDE_INT limit = root->offset + root->size;
2133
  HOST_WIDE_INT covered_to = root->offset;
2134
  bool scalar = is_gimple_reg_type (root->type);
2135
  bool hole = false, sth_created = false;
2136
 
2137
  if (parent)
2138
    {
2139
      if (parent->grp_read)
2140
        root->grp_read = 1;
2141
      if (parent->grp_assignment_read)
2142
        root->grp_assignment_read = 1;
2143
      if (parent->grp_write)
2144
        root->grp_write = 1;
2145
      if (parent->grp_assignment_write)
2146
        root->grp_assignment_write = 1;
2147
      if (parent->grp_total_scalarization)
2148
        root->grp_total_scalarization = 1;
2149
    }
2150
 
2151
  if (root->grp_unscalarizable_region)
2152
    allow_replacements = false;
2153
 
2154
  if (allow_replacements && expr_with_var_bounded_array_refs_p (root->expr))
2155
    allow_replacements = false;
2156
 
2157
  for (child = root->first_child; child; child = child->next_sibling)
2158
    {
2159
      hole |= covered_to < child->offset;
2160
      sth_created |= analyze_access_subtree (child, root,
2161
                                             allow_replacements && !scalar);
2162
 
2163
      root->grp_unscalarized_data |= child->grp_unscalarized_data;
2164
      root->grp_total_scalarization &= child->grp_total_scalarization;
2165
      if (child->grp_covered)
2166
        covered_to += child->size;
2167
      else
2168
        hole = true;
2169
    }
2170
 
2171
  if (allow_replacements && scalar && !root->first_child
2172
      && (root->grp_hint
2173
          || ((root->grp_scalar_read || root->grp_assignment_read)
2174
              && (root->grp_scalar_write || root->grp_assignment_write))))
2175
    {
2176
      bool new_integer_type;
2177
      /* Always create access replacements that cover the whole access.
2178
         For integral types this means the precision has to match.
2179
         Avoid assumptions based on the integral type kind, too.  */
2180
      if (INTEGRAL_TYPE_P (root->type)
2181
          && (TREE_CODE (root->type) != INTEGER_TYPE
2182
              || TYPE_PRECISION (root->type) != root->size)
2183
          /* But leave bitfield accesses alone.  */
2184
          && (root->offset % BITS_PER_UNIT) == 0)
2185
        {
2186
          tree rt = root->type;
2187
          root->type = build_nonstandard_integer_type (root->size,
2188
                                                       TYPE_UNSIGNED (rt));
2189
          root->expr = build_ref_for_offset (UNKNOWN_LOCATION,
2190
                                             root->base, root->offset,
2191
                                             root->type, NULL, false);
2192
          new_integer_type = true;
2193
        }
2194
      else
2195
        new_integer_type = false;
2196
 
2197
      if (dump_file && (dump_flags & TDF_DETAILS))
2198
        {
2199
          fprintf (dump_file, "Marking ");
2200
          print_generic_expr (dump_file, root->base, 0);
2201
          fprintf (dump_file, " offset: %u, size: %u ",
2202
                   (unsigned) root->offset, (unsigned) root->size);
2203
          fprintf (dump_file, " to be replaced%s.\n",
2204
                   new_integer_type ? " with an integer": "");
2205
        }
2206
 
2207
      root->grp_to_be_replaced = 1;
2208
      sth_created = true;
2209
      hole = false;
2210
    }
2211
  else
2212
    {
2213
      if (covered_to < limit)
2214
        hole = true;
2215
      if (scalar)
2216
        root->grp_total_scalarization = 0;
2217
    }
2218
 
2219
  if (sth_created
2220
      && (!hole || root->grp_total_scalarization))
2221
    {
2222
      root->grp_covered = 1;
2223
      return true;
2224
    }
2225
  if (root->grp_write || TREE_CODE (root->base) == PARM_DECL)
2226
    root->grp_unscalarized_data = 1; /* not covered and written to */
2227
  if (sth_created)
2228
    return true;
2229
  return false;
2230
}
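 
/* Editorial sketch, not part of the original file: the
   build_nonstandard_integer_type branch above fires for instance for a
   _Bool member on common targets, where the precision of the type (1)
   differs from the size of the access (8 bits):

     struct F { _Bool flag; int n; };   // flag gets an 8-bit integer replacement

   Exact sizes and precisions are target-specific assumptions here.  */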
2231
 
2232
/* Analyze all access trees linked by next_grp by the means of
2233
   analyze_access_subtree.  */
2234
static bool
2235
analyze_access_trees (struct access *access)
2236
{
2237
  bool ret = false;
2238
 
2239
  while (access)
2240
    {
2241
      if (analyze_access_subtree (access, NULL, true))
2242
        ret = true;
2243
      access = access->next_grp;
2244
    }
2245
 
2246
  return ret;
2247
}
2248
 
2249
/* Return true iff a potential new child of LACC at offset NORM_OFFSET and with size
2250
   SIZE would conflict with an already existing one.  If exactly such a child
2251
   already exists in LACC, store a pointer to it in EXACT_MATCH.  */
2252
 
2253
static bool
2254
child_would_conflict_in_lacc (struct access *lacc, HOST_WIDE_INT norm_offset,
2255
                              HOST_WIDE_INT size, struct access **exact_match)
2256
{
2257
  struct access *child;
2258
 
2259
  for (child = lacc->first_child; child; child = child->next_sibling)
2260
    {
2261
      if (child->offset == norm_offset && child->size == size)
2262
        {
2263
          *exact_match = child;
2264
          return true;
2265
        }
2266
 
2267
      if (child->offset < norm_offset + size
2268
          && child->offset + child->size > norm_offset)
2269
        return true;
2270
    }
2271
 
2272
  return false;
2273
}
2274
 
2275
/* Create a new child access of PARENT, with all properties just like MODEL
2276
   except for its offset and with its grp_write false and grp_read true.
2277
   Return the new access or NULL if it cannot be created.  Note that this access
2278
   is created long after all splicing and sorting; it is not located in any
2279
   access vector and is automatically a representative of its group.  */
2280
 
2281
static struct access *
2282
create_artificial_child_access (struct access *parent, struct access *model,
2283
                                HOST_WIDE_INT new_offset)
2284
{
2285
  struct access *access;
2286
  struct access **child;
2287
  tree expr = parent->base;
2288
 
2289
  gcc_assert (!model->grp_unscalarizable_region);
2290
 
2291
  access = (struct access *) pool_alloc (access_pool);
2292
  memset (access, 0, sizeof (struct access));
2293
  if (!build_user_friendly_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
2294
                                           model->type))
2295
    {
2296
      access->grp_no_warning = true;
2297
      expr = build_ref_for_model (EXPR_LOCATION (parent->base), parent->base,
2298
                                  new_offset, model, NULL, false);
2299
    }
2300
 
2301
  access->base = parent->base;
2302
  access->expr = expr;
2303
  access->offset = new_offset;
2304
  access->size = model->size;
2305
  access->type = model->type;
2306
  access->grp_write = true;
2307
  access->grp_read = false;
2308
 
2309
  child = &parent->first_child;
2310
  while (*child && (*child)->offset < new_offset)
2311
    child = &(*child)->next_sibling;
2312
 
2313
  access->next_sibling = *child;
2314
  *child = access;
2315
 
2316
  return access;
2317
}
2318
 
2319
 
2320
/* Propagate all subaccesses of RACC across an assignment link to LACC. Return
2321
   true if any new subaccess was created.  Additionally, if RACC is a scalar
2322
   access but LACC is not, change the type of the latter, if possible.  */
2323
 
2324
static bool
2325
propagate_subaccesses_across_link (struct access *lacc, struct access *racc)
2326
{
2327
  struct access *rchild;
2328
  HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
2329
  bool ret = false;
2330
 
2331
  if (is_gimple_reg_type (lacc->type)
2332
      || lacc->grp_unscalarizable_region
2333
      || racc->grp_unscalarizable_region)
2334
    return false;
2335
 
2336
  if (is_gimple_reg_type (racc->type))
2337
    {
2338
      if (!lacc->first_child && !racc->first_child)
2339
        {
2340
          tree t = lacc->base;
2341
 
2342
          lacc->type = racc->type;
2343
          if (build_user_friendly_ref_for_offset (&t, TREE_TYPE (t),
2344
                                                  lacc->offset, racc->type))
2345
            lacc->expr = t;
2346
          else
2347
            {
2348
              lacc->expr = build_ref_for_model (EXPR_LOCATION (lacc->base),
2349
                                                lacc->base, lacc->offset,
2350
                                                racc, NULL, false);
2351
              lacc->grp_no_warning = true;
2352
            }
2353
        }
2354
      return false;
2355
    }
2356
 
2357
  for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
2358
    {
2359
      struct access *new_acc = NULL;
2360
      HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;
2361
 
2362
      if (rchild->grp_unscalarizable_region)
2363
        continue;
2364
 
2365
      if (child_would_conflict_in_lacc (lacc, norm_offset, rchild->size,
2366
                                        &new_acc))
2367
        {
2368
          if (new_acc)
2369
            {
2370
              rchild->grp_hint = 1;
2371
              new_acc->grp_hint |= new_acc->grp_read;
2372
              if (rchild->first_child)
2373
                ret |= propagate_subaccesses_across_link (new_acc, rchild);
2374
            }
2375
          continue;
2376
        }
2377
 
2378
      rchild->grp_hint = 1;
2379
      new_acc = create_artificial_child_access (lacc, rchild, norm_offset);
2380
      if (new_acc)
2381
        {
2382
          ret = true;
2383
          if (racc->first_child)
2384
            propagate_subaccesses_across_link (new_acc, rchild);
2385
        }
2386
    }
2387
 
2388
  return ret;
2389
}
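 
/* Editorial sketch, not part of the original file: given

     struct A { int i; int j; } x, y;

   where  x.i  is accessed directly and the body contains  y = x; , the
   assign link from that statement propagates the subaccess of  x  at
   offset 0 into  y  as an artificial child (conceptually  y.i ), so
   that the copy can later be done replacement-to-replacement.  */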
2390
 
2391
/* Propagate all subaccesses across assignment links.  */
2392
 
2393
static void
2394
propagate_all_subaccesses (void)
2395
{
2396
  while (work_queue_head)
2397
    {
2398
      struct access *racc = pop_access_from_work_queue ();
2399
      struct assign_link *link;
2400
 
2401
      gcc_assert (racc->first_link);
2402
 
2403
      for (link = racc->first_link; link; link = link->next)
2404
        {
2405
          struct access *lacc = link->lacc;
2406
 
2407
          if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
2408
            continue;
2409
          lacc = lacc->group_representative;
2410
          if (propagate_subaccesses_across_link (lacc, racc)
2411
              && lacc->first_link)
2412
            add_access_to_work_queue (lacc);
2413
        }
2414
    }
2415
}
2416
 
2417
/* Go through all accesses collected throughout the (intraprocedural) analysis
2418
   stage, exclude overlapping ones, identify representatives and build trees
2419
   out of them, making decisions about scalarization on the way.  Return true
2420
   iff there are any to-be-scalarized variables after this stage. */
2421
 
2422
static bool
2423
analyze_all_variable_accesses (void)
2424
{
2425
  int res = 0;
2426
  bitmap tmp = BITMAP_ALLOC (NULL);
2427
  bitmap_iterator bi;
2428
  unsigned i, max_total_scalarization_size;
2429
 
2430
  max_total_scalarization_size = UNITS_PER_WORD * BITS_PER_UNIT
2431
    * MOVE_RATIO (optimize_function_for_speed_p (cfun));
2432
 
2433
  EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
2434
    if (bitmap_bit_p (should_scalarize_away_bitmap, i)
2435
        && !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
2436
      {
2437
        tree var = referenced_var (i);
2438
 
2439
        if (TREE_CODE (var) == VAR_DECL
2440
            && type_consists_of_records_p (TREE_TYPE (var)))
2441
          {
2442
            if ((unsigned) tree_low_cst (TYPE_SIZE (TREE_TYPE (var)), 1)
2443
                <= max_total_scalarization_size)
2444
              {
2445
                completely_scalarize_var (var);
2446
                if (dump_file && (dump_flags & TDF_DETAILS))
2447
                  {
2448
                    fprintf (dump_file, "Will attempt to totally scalarize ");
2449
                    print_generic_expr (dump_file, var, 0);
2450
                    fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2451
                  }
2452
              }
2453
            else if (dump_file && (dump_flags & TDF_DETAILS))
2454
              {
2455
                fprintf (dump_file, "Too big to totally scalarize: ");
2456
                print_generic_expr (dump_file, var, 0);
2457
                fprintf (dump_file, " (UID: %u)\n", DECL_UID (var));
2458
              }
2459
          }
2460
      }
2461
 
2462
  bitmap_copy (tmp, candidate_bitmap);
2463
  EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2464
    {
2465
      tree var = referenced_var (i);
2466
      struct access *access;
2467
 
2468
      access = sort_and_splice_var_accesses (var);
2469
      if (!access || !build_access_trees (access))
2470
        disqualify_candidate (var,
2471
                              "No or inhibitingly overlapping accesses.");
2472
    }
2473
 
2474
  propagate_all_subaccesses ();
2475
 
2476
  bitmap_copy (tmp, candidate_bitmap);
2477
  EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2478
    {
2479
      tree var = referenced_var (i);
2480
      struct access *access = get_first_repr_for_decl (var);
2481
 
2482
      if (analyze_access_trees (access))
2483
        {
2484
          res++;
2485
          if (dump_file && (dump_flags & TDF_DETAILS))
2486
            {
2487
              fprintf (dump_file, "\nAccess trees for ");
2488
              print_generic_expr (dump_file, var, 0);
2489
              fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2490
              dump_access_tree (dump_file, access);
2491
              fprintf (dump_file, "\n");
2492
            }
2493
        }
2494
      else
2495
        disqualify_candidate (var, "No scalar replacements to be created.");
2496
    }
2497
 
2498
  BITMAP_FREE (tmp);
2499
 
2500
  if (res)
2501
    {
2502
      statistics_counter_event (cfun, "Scalarized aggregates", res);
2503
      return true;
2504
    }
2505
  else
2506
    return false;
2507
}
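 
/* Editorial sketch, not part of the original file: the total
   scalarization limit computed above is UNITS_PER_WORD * BITS_PER_UNIT
   * MOVE_RATIO bits.  Assuming a 64-bit target where that product is
   8 * 8 * 4 = 256 bits:

     struct Q { int a, b, c, d; } q;        // 128 bits: eligible
     struct R { struct Q q1, q2, q3; } r;   // 384 bits: too big

   All of the numbers and names here are assumptions for illustration.  */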
2508
 
2509
/* Generate statements copying scalar replacements of accesses within a subtree
2510
   into or out of AGG.  ACCESS, all its children, siblings and their children
2511
   are to be processed.  AGG is an aggregate type expression (can be a
2512
   declaration but does not have to be; it can, for example, also be a MEM_REF or
2513
   a series of handled components).  TOP_OFFSET is the offset of the processed
2514
   subtree which has to be subtracted from offsets of individual accesses to
2515
   get corresponding offsets for AGG.  If CHUNK_SIZE is non-zero, copy only
2516
   replacements in the interval <start_offset, start_offset + chunk_size>,
2517
   otherwise copy all.  GSI is a statement iterator used to place the new
2518
   statements.  WRITE should be true when the statements should write from AGG
2519
   to the replacement and false if vice versa.  If INSERT_AFTER is true, new
2520
   statements will be added after the current statement in GSI; otherwise
2521
   they will be added before it.  */
2522
 
2523
static void
2524
generate_subtree_copies (struct access *access, tree agg,
2525
                         HOST_WIDE_INT top_offset,
2526
                         HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
2527
                         gimple_stmt_iterator *gsi, bool write,
2528
                         bool insert_after, location_t loc)
2529
{
2530
  do
2531
    {
2532
      if (chunk_size && access->offset >= start_offset + chunk_size)
2533
        return;
2534
 
2535
      if (access->grp_to_be_replaced
2536
          && (chunk_size == 0
2537
              || access->offset + access->size > start_offset))
2538
        {
2539
          tree expr, repl = get_access_replacement (access);
2540
          gimple stmt;
2541
 
2542
          expr = build_ref_for_model (loc, agg, access->offset - top_offset,
2543
                                      access, gsi, insert_after);
2544
 
2545
          if (write)
2546
            {
2547
              if (access->grp_partial_lhs)
2548
                expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
2549
                                                 !insert_after,
2550
                                                 insert_after ? GSI_NEW_STMT
2551
                                                 : GSI_SAME_STMT);
2552
              stmt = gimple_build_assign (repl, expr);
2553
            }
2554
          else
2555
            {
2556
              TREE_NO_WARNING (repl) = 1;
2557
              if (access->grp_partial_lhs)
2558
                repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2559
                                                 !insert_after,
2560
                                                 insert_after ? GSI_NEW_STMT
2561
                                                 : GSI_SAME_STMT);
2562
              stmt = gimple_build_assign (expr, repl);
2563
            }
2564
          gimple_set_location (stmt, loc);
2565
 
2566
          if (insert_after)
2567
            gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2568
          else
2569
            gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2570
          update_stmt (stmt);
2571
          sra_stats.subtree_copies++;
2572
        }
2573
 
2574
      if (access->first_child)
2575
        generate_subtree_copies (access->first_child, agg, top_offset,
2576
                                 start_offset, chunk_size, gsi,
2577
                                 write, insert_after, loc);
2578
 
2579
      access = access->next_sibling;
2580
    }
2581
  while (access);
2582
}
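 
/* Editorial sketch, not part of the original file: for a candidate

     struct S { int a; float b; } s;

   whose fields are scalarized into replacements SR_a and SR_b, a call
   with WRITE == true inserts the loads

     SR_a = s.a;
     SR_b = s.b;

   around the statement at GSI, and a call with WRITE == false inserts
   the mirrored stores  s.a = SR_a;  s.b = SR_b;   SR_a and SR_b are
   illustrative names only.  */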
2583
 
2584
/* Assign zero to all scalar replacements in an access subtree.  ACCESS is the
2585
   root of the subtree to be processed.  GSI is the statement iterator used
2586
   for inserting statements which are added after the current statement if
2587
   INSERT_AFTER is true or before it otherwise.  */
2588
 
2589
static void
2590
init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
2591
                        bool insert_after, location_t loc)
2592
 
2593
{
2594
  struct access *child;
2595
 
2596
  if (access->grp_to_be_replaced)
2597
    {
2598
      gimple stmt;
2599
 
2600
      stmt = gimple_build_assign (get_access_replacement (access),
2601
                                  build_zero_cst (access->type));
2602
      if (insert_after)
2603
        gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2604
      else
2605
        gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2606
      update_stmt (stmt);
2607
      gimple_set_location (stmt, loc);
2608
    }
2609
 
2610
  for (child = access->first_child; child; child = child->next_sibling)
2611
    init_subtree_with_zero (child, gsi, insert_after, loc);
2612
}
2613
 
2614
/* Search for an access representative for the given expression EXPR and
2615
   return it or NULL if it cannot be found.  */
2616
 
2617
static struct access *
2618
get_access_for_expr (tree expr)
2619
{
2620
  HOST_WIDE_INT offset, size, max_size;
2621
  tree base;
2622
 
2623
  /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
2624
   a different size than that of their argument, and we need the latter
2625
     one.  */
2626
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
2627
    expr = TREE_OPERAND (expr, 0);
2628
 
2629
  base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
2630
  if (max_size == -1 || !DECL_P (base))
2631
    return NULL;
2632
 
2633
  if (!bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
2634
    return NULL;
2635
 
2636
  return get_var_base_offset_size_access (base, offset, max_size);
2637
}
2638
 
2639
/* Replace the expression EXPR with a scalar replacement if there is one and
2640
   generate other statements to do type conversion or subtree copying if
2641
   necessary.  GSI is used to place newly created statements, WRITE is true if
2642
   the expression is being written to (it is on a LHS of a statement or output
2643
   in an assembly statement).  */
2644
 
2645
static bool
2646
sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write)
2647
{
2648
  location_t loc;
2649
  struct access *access;
2650
  tree type, bfr;
2651
 
2652
  if (TREE_CODE (*expr) == BIT_FIELD_REF)
2653
    {
2654
      bfr = *expr;
2655
      expr = &TREE_OPERAND (*expr, 0);
2656
    }
2657
  else
2658
    bfr = NULL_TREE;
2659
 
2660
  if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
2661
    expr = &TREE_OPERAND (*expr, 0);
2662
  access = get_access_for_expr (*expr);
2663
  if (!access)
2664
    return false;
2665
  type = TREE_TYPE (*expr);
2666
 
2667
  loc = gimple_location (gsi_stmt (*gsi));
2668
  if (access->grp_to_be_replaced)
2669
    {
2670
      tree repl = get_access_replacement (access);
2671
      /* If we replace a non-register typed access simply use the original
2672
         access expression to extract the scalar component afterwards.
2673
         This happens if scalarizing a function return value or parameter
2674
         like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
2675
         gcc.c-torture/compile/20011217-1.c.
2676
 
2677
         We also want to use this when accessing a complex or vector which can
2678
         be accessed as a different type too, potentially creating a need for
2679
         type conversion (see PR42196) and when scalarized unions are involved
2680
         in assembler statements (see PR42398).  */
2681
      if (!useless_type_conversion_p (type, access->type))
2682
        {
2683
          tree ref;
2684
 
2685
          ref = build_ref_for_model (loc, access->base, access->offset, access,
2686
                                     NULL, false);
2687
 
2688
          if (write)
2689
            {
2690
              gimple stmt;
2691
 
2692
              if (access->grp_partial_lhs)
2693
                ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
2694
                                                 false, GSI_NEW_STMT);
2695
              stmt = gimple_build_assign (repl, ref);
2696
              gimple_set_location (stmt, loc);
2697
              gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2698
            }
2699
          else
2700
            {
2701
              gimple stmt;
2702
 
2703
              if (access->grp_partial_lhs)
2704
                repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2705
                                                 true, GSI_SAME_STMT);
2706
              stmt = gimple_build_assign (ref, repl);
2707
              gimple_set_location (stmt, loc);
2708
              gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2709
            }
2710
        }
2711
      else
2712
        *expr = repl;
2713
      sra_stats.exprs++;
2714
    }
2715
 
2716
  if (access->first_child)
2717
    {
2718
      HOST_WIDE_INT start_offset, chunk_size;
2719
      if (bfr
2720
          && host_integerp (TREE_OPERAND (bfr, 1), 1)
2721
          && host_integerp (TREE_OPERAND (bfr, 2), 1))
2722
        {
2723
          chunk_size = tree_low_cst (TREE_OPERAND (bfr, 1), 1);
2724
          start_offset = access->offset
2725
            + tree_low_cst (TREE_OPERAND (bfr, 2), 1);
2726
        }
2727
      else
2728
        start_offset = chunk_size = 0;
2729
 
2730
      generate_subtree_copies (access->first_child, access->base, 0,
2731
                               start_offset, chunk_size, gsi, write, write,
2732
                               loc);
2733
    }
2734
  return true;
2735
}
2736
 
2737
/* Where scalar replacements of the RHS have been written to when a replacement
2738
   of the LHS of an assignment cannot be directly loaded from a replacement of
2739
   the RHS. */
2740
enum unscalarized_data_handling { SRA_UDH_NONE,  /* Nothing done so far. */
2741
                                  SRA_UDH_RIGHT, /* Data flushed to the RHS. */
2742
                                  SRA_UDH_LEFT }; /* Data flushed to the LHS. */
2743
 
2744
/* Store all replacements in the access tree rooted in TOP_RACC either to their
2745
   base aggregate if there are unscalarized data or directly to LHS of the
2746
   statement that is pointed to by GSI otherwise.  */
2747
 
2748
static enum unscalarized_data_handling
2749
handle_unscalarized_data_in_subtree (struct access *top_racc,
2750
                                     gimple_stmt_iterator *gsi)
2751
{
2752
  if (top_racc->grp_unscalarized_data)
2753
    {
2754
      generate_subtree_copies (top_racc->first_child, top_racc->base, 0, 0, 0,
2755
                               gsi, false, false,
2756
                               gimple_location (gsi_stmt (*gsi)));
2757
      return SRA_UDH_RIGHT;
2758
    }
2759
  else
2760
    {
2761
      tree lhs = gimple_assign_lhs (gsi_stmt (*gsi));
2762
      generate_subtree_copies (top_racc->first_child, lhs, top_racc->offset,
2763
                               0, 0, gsi, false, false,
2764
                               gimple_location (gsi_stmt (*gsi)));
2765
      return SRA_UDH_LEFT;
2766
    }
2767
}
2768
 
2769
 
2770
/* Try to generate statements to load all sub-replacements in an access subtree
2771
   formed by children of LACC from scalar replacements in the TOP_RACC subtree.
2772
   If that is not possible, refresh the TOP_RACC base aggregate and load the
2773
   accesses from it.  LEFT_OFFSET is the offset of the left whole subtree being
2774
   copied.  NEW_GSI is a stmt iterator used for statement insertions after the
2775
   original assignment, OLD_GSI is used to insert statements before the
2776
   assignment.  *REFRESHED keeps track of whether we have needed to
2777
   refresh replacements of the LHS and from which side of the assignment this
2778
   takes place.  */
2779
 
2780
static void
2781
load_assign_lhs_subreplacements (struct access *lacc, struct access *top_racc,
2782
                                 HOST_WIDE_INT left_offset,
2783
                                 gimple_stmt_iterator *old_gsi,
2784
                                 gimple_stmt_iterator *new_gsi,
2785
                                 enum unscalarized_data_handling *refreshed)
2786
{
2787
  location_t loc = gimple_location (gsi_stmt (*old_gsi));
2788
  for (lacc = lacc->first_child; lacc; lacc = lacc->next_sibling)
2789
    {
2790
      if (lacc->grp_to_be_replaced)
2791
        {
2792
          struct access *racc;
2793
          HOST_WIDE_INT offset = lacc->offset - left_offset + top_racc->offset;
2794
          gimple stmt;
2795
          tree rhs;
2796
 
2797
          racc = find_access_in_subtree (top_racc, offset, lacc->size);
2798
          if (racc && racc->grp_to_be_replaced)
2799
            {
2800
              rhs = get_access_replacement (racc);
2801
              if (!useless_type_conversion_p (lacc->type, racc->type))
2802
                rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, lacc->type, rhs);
2803
 
2804
              if (racc->grp_partial_lhs && lacc->grp_partial_lhs)
2805
                rhs = force_gimple_operand_gsi (old_gsi, rhs, true, NULL_TREE,
2806
                                                true, GSI_SAME_STMT);
2807
            }
2808
          else
2809
            {
2810
              /* No suitable access on the right hand side, need to load from
2811
                 the aggregate.  See if we have to update it first... */
2812
              if (*refreshed == SRA_UDH_NONE)
2813
                *refreshed = handle_unscalarized_data_in_subtree (top_racc,
2814
                                                                  old_gsi);
2815
 
2816
              if (*refreshed == SRA_UDH_LEFT)
2817
                rhs = build_ref_for_model (loc, lacc->base, lacc->offset, lacc,
2818
                                            new_gsi, true);
2819
              else
2820
                rhs = build_ref_for_model (loc, top_racc->base, offset, lacc,
2821
                                            new_gsi, true);
2822
              if (lacc->grp_partial_lhs)
2823
                rhs = force_gimple_operand_gsi (new_gsi, rhs, true, NULL_TREE,
2824
                                                false, GSI_NEW_STMT);
2825
            }
2826
 
2827
          stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
2828
          gsi_insert_after (new_gsi, stmt, GSI_NEW_STMT);
2829
          gimple_set_location (stmt, loc);
2830
          update_stmt (stmt);
2831
          sra_stats.subreplacements++;
2832
        }
2833
      else if (*refreshed == SRA_UDH_NONE
2834
               && lacc->grp_read && !lacc->grp_covered)
2835
        *refreshed = handle_unscalarized_data_in_subtree (top_racc,
2836
                                                          old_gsi);
2837
 
2838
      if (lacc->first_child)
2839
        load_assign_lhs_subreplacements (lacc, top_racc, left_offset,
2840
                                         old_gsi, new_gsi, refreshed);
2841
    }
2842
}
2843
 
2844
/* Result code for SRA assignment modification.  */
2845
enum assignment_mod_result { SRA_AM_NONE,       /* nothing done for the stmt */
2846
                             SRA_AM_MODIFIED,  /* stmt changed but not
2847
                                                  removed */
2848
                             SRA_AM_REMOVED };  /* stmt eliminated */
2849
 
2850
/* Modify assignments with a CONSTRUCTOR on their RHS.  STMT contains a pointer
2851
   to the assignment and GSI is the statement iterator pointing at it.  Returns
2852
   the same values as sra_modify_assign.  */
2853
 
2854
static enum assignment_mod_result
2855
sra_modify_constructor_assign (gimple *stmt, gimple_stmt_iterator *gsi)
2856
{
2857
  tree lhs = gimple_assign_lhs (*stmt);
2858
  struct access *acc;
2859
  location_t loc;
2860
 
2861
  acc = get_access_for_expr (lhs);
2862
  if (!acc)
2863
    return SRA_AM_NONE;
2864
 
2865
  if (gimple_clobber_p (*stmt))
2866
    {
2867
      /* Remove clobbers of fully scalarized variables, otherwise
2868
         do nothing.  */
2869
      if (acc->grp_covered)
2870
        {
2871
          unlink_stmt_vdef (*stmt);
2872
          gsi_remove (gsi, true);
2873
          return SRA_AM_REMOVED;
2874
        }
2875
      else
2876
        return SRA_AM_NONE;
2877
    }
2878
 
2879
  loc = gimple_location (*stmt);
2880
  if (VEC_length (constructor_elt,
2881
                  CONSTRUCTOR_ELTS (gimple_assign_rhs1 (*stmt))) > 0)
2882
    {
2883
      /* I have never seen this code path trigger but if it can happen the
2884
         following should handle it gracefully.  */
2885
      if (access_has_children_p (acc))
2886
        generate_subtree_copies (acc->first_child, acc->base, 0, 0, 0, gsi,
2887
                                 true, true, loc);
2888
      return SRA_AM_MODIFIED;
2889
    }
2890
 
2891
  if (acc->grp_covered)
2892
    {
2893
      init_subtree_with_zero (acc, gsi, false, loc);
2894
      unlink_stmt_vdef (*stmt);
2895
      gsi_remove (gsi, true);
2896
      return SRA_AM_REMOVED;
2897
    }
2898
  else
2899
    {
2900
      init_subtree_with_zero (acc, gsi, true, loc);
2901
      return SRA_AM_MODIFIED;
2902
    }
2903
}
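 
/* Editorial sketch, not part of the original file: for a fully
   scalarized local

     struct S { int a; float b; } s;

   the statement  s = (struct S){0};  is removed outright and replaced
   by zero-assignments to the replacements, conceptually

     SR_a = 0;
     SR_b = 0.0;

   with SR_a and SR_b again being illustrative names.  */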
2904
 
2905
/* Create and return a new suitable default definition SSA_NAME for RACC which
2906
   is an access describing an uninitialized part of an aggregate that is being
2907
   loaded.  */
2908
 
2909
static tree
2910
get_repl_default_def_ssa_name (struct access *racc)
2911
{
2912
  tree repl, decl;
2913
 
2914
  decl = get_unrenamed_access_replacement (racc);
2915
 
2916
  repl = gimple_default_def (cfun, decl);
2917
  if (!repl)
2918
    {
2919
      repl = make_ssa_name (decl, gimple_build_nop ());
2920
      set_default_def (decl, repl);
2921
    }
2922
 
2923
  return repl;
2924
}
2925
 
2926
/* Return true if REF has a COMPONENT_REF with a bit-field field declaration
2927
   somewhere in it.  */
2928
 
2929
static inline bool
2930
contains_bitfld_comp_ref_p (const_tree ref)
2931
{
2932
  while (handled_component_p (ref))
2933
    {
2934
      if (TREE_CODE (ref) == COMPONENT_REF
2935
          && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
2936
        return true;
2937
      ref = TREE_OPERAND (ref, 0);
2938
    }
2939
 
2940
  return false;
2941
}
2942
 
2943
/* Return true if REF has a VIEW_CONVERT_EXPR or a COMPONENT_REF with a
2944
   bit-field field declaration somewhere in it.  */
2945
 
2946
static inline bool
2947
contains_vce_or_bfcref_p (const_tree ref)
2948
{
2949
  while (handled_component_p (ref))
2950
    {
2951
      if (TREE_CODE (ref) == VIEW_CONVERT_EXPR
2952
          || (TREE_CODE (ref) == COMPONENT_REF
2953
              && DECL_BIT_FIELD (TREE_OPERAND (ref, 1))))
2954
        return true;
2955
      ref = TREE_OPERAND (ref, 0);
2956
    }
2957
 
2958
  return false;
2959
}
2960
 
2961
/* Examine both sides of the assignment statement pointed to by STMT, replace
2962
   them with a scalar replacement if there is one and generate copying of
2963
   replacements if scalarized aggregates have been used in the assignment.  GSI
2964
   is used to hold generated statements for type conversions and subtree
2965
   copying.  */
2966
 
2967
static enum assignment_mod_result
2968
sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi)
2969
{
2970
  struct access *lacc, *racc;
2971
  tree lhs, rhs;
2972
  bool modify_this_stmt = false;
2973
  bool force_gimple_rhs = false;
2974
  location_t loc;
2975
  gimple_stmt_iterator orig_gsi = *gsi;
2976
 
2977
  if (!gimple_assign_single_p (*stmt))
2978
    return SRA_AM_NONE;
2979
  lhs = gimple_assign_lhs (*stmt);
2980
  rhs = gimple_assign_rhs1 (*stmt);
2981
 
2982
  if (TREE_CODE (rhs) == CONSTRUCTOR)
2983
    return sra_modify_constructor_assign (stmt, gsi);
2984
 
2985
  if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
2986
      || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
2987
      || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
2988
    {
2989
      modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (*stmt),
2990
                                          gsi, false);
2991
      modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (*stmt),
2992
                                           gsi, true);
2993
      return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
2994
    }
2995
 
2996
  lacc = get_access_for_expr (lhs);
2997
  racc = get_access_for_expr (rhs);
2998
  if (!lacc && !racc)
2999
    return SRA_AM_NONE;
3000
 
3001
  loc = gimple_location (*stmt);
3002
  if (lacc && lacc->grp_to_be_replaced)
3003
    {
3004
      lhs = get_access_replacement (lacc);
3005
      gimple_assign_set_lhs (*stmt, lhs);
3006
      modify_this_stmt = true;
3007
      if (lacc->grp_partial_lhs)
3008
        force_gimple_rhs = true;
3009
      sra_stats.exprs++;
3010
    }
3011
 
3012
  if (racc && racc->grp_to_be_replaced)
3013
    {
3014
      rhs = get_access_replacement (racc);
3015
      modify_this_stmt = true;
3016
      if (racc->grp_partial_lhs)
3017
        force_gimple_rhs = true;
3018
      sra_stats.exprs++;
3019
    }
3020
  else if (racc
3021
           && !racc->grp_unscalarized_data
3022
           && TREE_CODE (lhs) == SSA_NAME
3023
           && !access_has_replacements_p (racc))
3024
    {
3025
      rhs = get_repl_default_def_ssa_name (racc);
3026
      modify_this_stmt = true;
3027
      sra_stats.exprs++;
3028
    }
3029
 
3030
  if (modify_this_stmt)
3031
    {
3032
      if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
3033
        {
3034
          /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
3035
             ???  This should move to fold_stmt which we simply should
3036
             call after building a VIEW_CONVERT_EXPR here.  */
3037
          if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
3038
              && !contains_bitfld_comp_ref_p (lhs)
3039
              && !access_has_children_p (lacc))
3040
            {
3041
              lhs = build_ref_for_model (loc, lhs, 0, racc, gsi, false);
3042
              gimple_assign_set_lhs (*stmt, lhs);
3043
            }
3044
          else if (AGGREGATE_TYPE_P (TREE_TYPE (rhs))
3045
                   && !contains_vce_or_bfcref_p (rhs)
3046
                   && !access_has_children_p (racc))
3047
            rhs = build_ref_for_model (loc, rhs, 0, lacc, gsi, false);
3048
 
3049
          if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
3050
            {
3051
              rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
3052
                                     rhs);
3053
              if (is_gimple_reg_type (TREE_TYPE (lhs))
3054
                  && TREE_CODE (lhs) != SSA_NAME)
3055
                force_gimple_rhs = true;
3056
            }
3057
        }
3058
    }
3059
 
3060
  /* From this point on, the function deals with assignments in between
3061
     aggregates when at least one has scalar reductions of some of its
3062
     components.  There are three possible scenarios: 1) both the LHS and RHS have
3063
     to-be-scalarized components, 2) only the RHS does, or 3) only the LHS does.
3064
 
3065
     In the first case, we would like to load the LHS components from RHS
3066
     components whenever possible.  If that is not possible, we would like to
3067
     read it directly from the RHS (after updating it by storing in it its own
3068
     components).  If there are some necessary unscalarized data in the LHS,
3069
     those will be loaded by the original assignment too.  If neither of these
3070
     cases happens, the original statement can be removed.  Most of this is done
3071
     by load_assign_lhs_subreplacements.
3072
 
3073
     In the second case, we would like to store all RHS scalarized components
3074
     directly into LHS and if they cover the aggregate completely, remove the
3075
     statement too.  In the third case, we want the LHS components to be loaded
3076
     directly from the RHS (DSE will remove the original statement if it
3077
     becomes redundant).
3078
 
3079
     This is a bit complex but manageable when types match and when unions do
3080
     not cause confusion in a way that we cannot really load a component of LHS
3081
     from the RHS or vice versa (the access representing this level can have
3082
     subaccesses that are accessible only through a different union field at a
3083
     higher level - different from the one used in the examined expression).
3084
     Unions are fun.
3085
 
3086
     Therefore, I specially handle a fourth case, happening when there is a
3087
     specific type cast or it is impossible to locate a scalarized subaccess on
3088
     the other side of the expression.  If that happens, I simply "refresh" the
3089
     RHS by storing its scalarized components in it, leave the original statement
3090
     there to do the copying, and then load the scalar replacements of the LHS.
3091
     This is what the first branch does.  */
3092
 
3093
  if (modify_this_stmt
3094
      || gimple_has_volatile_ops (*stmt)
3095
      || contains_vce_or_bfcref_p (rhs)
3096
      || contains_vce_or_bfcref_p (lhs))
3097
    {
3098
      if (access_has_children_p (racc))
3099
        generate_subtree_copies (racc->first_child, racc->base, 0, 0, 0,
3100
                                 gsi, false, false, loc);
3101
      if (access_has_children_p (lacc))
3102
        generate_subtree_copies (lacc->first_child, lacc->base, 0, 0, 0,
3103
                                 gsi, true, true, loc);
3104
      sra_stats.separate_lhs_rhs_handling++;
3105
 
3106
      /* This gimplification must be done after generate_subtree_copies,
3107
         lest we insert the subtree copies in the middle of the gimplified
3108
         sequence.  */
3109
      if (force_gimple_rhs)
3110
        rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
3111
                                        true, GSI_SAME_STMT);
3112
      if (gimple_assign_rhs1 (*stmt) != rhs)
3113
        {
3114
          modify_this_stmt = true;
3115
          gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
3116
          gcc_assert (*stmt == gsi_stmt (orig_gsi));
3117
        }
3118
 
3119
      return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
3120
    }
3121
  else
3122
    {
3123
      if (access_has_children_p (lacc) && access_has_children_p (racc))
3124
        {
3125
          gimple_stmt_iterator orig_gsi = *gsi;
3126
          enum unscalarized_data_handling refreshed;
3127
 
3128
          if (lacc->grp_read && !lacc->grp_covered)
3129
            refreshed = handle_unscalarized_data_in_subtree (racc, gsi);
3130
          else
3131
            refreshed = SRA_UDH_NONE;
3132
 
3133
          load_assign_lhs_subreplacements (lacc, racc, lacc->offset,
3134
                                           &orig_gsi, gsi, &refreshed);
3135
          if (refreshed != SRA_UDH_RIGHT)
3136
            {
3137
              gsi_next (gsi);
3138
              unlink_stmt_vdef (*stmt);
3139
              gsi_remove (&orig_gsi, true);
3140
              sra_stats.deleted++;
3141
              return SRA_AM_REMOVED;
3142
            }
3143
        }
3144
      else
3145
        {
3146
          if (access_has_children_p (racc)
3147
              && !racc->grp_unscalarized_data)
3148
            {
3149
              if (dump_file)
3150
                {
3151
                  fprintf (dump_file, "Removing load: ");
3152
                  print_gimple_stmt (dump_file, *stmt, 0, 0);
3153
                }
3154
              generate_subtree_copies (racc->first_child, lhs,
3155
                                       racc->offset, 0, 0, gsi,
3156
                                       false, false, loc);
3157
              gcc_assert (*stmt == gsi_stmt (*gsi));
3158
              unlink_stmt_vdef (*stmt);
3159
              gsi_remove (gsi, true);
3160
              sra_stats.deleted++;
3161
              return SRA_AM_REMOVED;
3162
            }
3163
          /* Restore the aggregate RHS from its components so the
3164
             prevailing aggregate copy does the right thing.  */
3165
          if (access_has_children_p (racc))
3166
            generate_subtree_copies (racc->first_child, racc->base, 0, 0, 0,
3167
                                     gsi, false, false, loc);
3168
          /* Re-load the components of the aggregate copy destination.
3169
             But use the RHS aggregate to load from to expose more
3170
             optimization opportunities.  */
3171
          if (access_has_children_p (lacc))
3172
            generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
3173
                                     0, 0, gsi, true, true, loc);
3174
        }
3175
 
3176
      return SRA_AM_NONE;
3177
    }
3178
}
3179
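
/* Editorial aside, not part of the original tree-sra.c: the kind of
   assignment the function above rewrites.  Assuming both A and B below
   are scalarization candidates (a hypothetical example), the aggregate
   copy "a = b" is replaced by copies of the scalar replacements,
   conceptually a$x = b$x; a$y = b$y; -- scenario 1) of the comment
   above.  */

struct pair_s { int x; int y; };

static int
copy_pair (void)
{
  struct pair_s b = { 1, 2 };
  struct pair_s a;

  a = b;                /* both sides have to-be-scalarized components */
  return a.x + a.y;
}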
 
3180
/* Traverse the function body and carry out all modifications as decided in
3181
   analyze_all_variable_accesses.  Return true iff the CFG has been
3182
   changed.  */
3183
 
3184
static bool
3185
sra_modify_function_body (void)
3186
{
3187
  bool cfg_changed = false;
3188
  basic_block bb;
3189
 
3190
  FOR_EACH_BB (bb)
3191
    {
3192
      gimple_stmt_iterator gsi = gsi_start_bb (bb);
3193
      while (!gsi_end_p (gsi))
3194
        {
3195
          gimple stmt = gsi_stmt (gsi);
3196
          enum assignment_mod_result assign_result;
3197
          bool modified = false, deleted = false;
3198
          tree *t;
3199
          unsigned i;
3200
 
3201
          switch (gimple_code (stmt))
3202
            {
3203
            case GIMPLE_RETURN:
3204
              t = gimple_return_retval_ptr (stmt);
3205
              if (*t != NULL_TREE)
3206
                modified |= sra_modify_expr (t, &gsi, false);
3207
              break;
3208
 
3209
            case GIMPLE_ASSIGN:
3210
              assign_result = sra_modify_assign (&stmt, &gsi);
3211
              modified |= assign_result == SRA_AM_MODIFIED;
3212
              deleted = assign_result == SRA_AM_REMOVED;
3213
              break;
3214
 
3215
            case GIMPLE_CALL:
3216
              /* Operands must be processed before the lhs.  */
3217
              for (i = 0; i < gimple_call_num_args (stmt); i++)
3218
                {
3219
                  t = gimple_call_arg_ptr (stmt, i);
3220
                  modified |= sra_modify_expr (t, &gsi, false);
3221
                }
3222
 
3223
              if (gimple_call_lhs (stmt))
3224
                {
3225
                  t = gimple_call_lhs_ptr (stmt);
3226
                  modified |= sra_modify_expr (t, &gsi, true);
3227
                }
3228
              break;
3229
 
3230
            case GIMPLE_ASM:
3231
              for (i = 0; i < gimple_asm_ninputs (stmt); i++)
3232
                {
3233
                  t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
3234
                  modified |= sra_modify_expr (t, &gsi, false);
3235
                }
3236
              for (i = 0; i < gimple_asm_noutputs (stmt); i++)
3237
                {
3238
                  t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
3239
                  modified |= sra_modify_expr (t, &gsi, true);
3240
                }
3241
              break;
3242
 
3243
            default:
3244
              break;
3245
            }
3246
 
3247
          if (modified)
3248
            {
3249
              update_stmt (stmt);
3250
              if (maybe_clean_eh_stmt (stmt)
3251
                  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
3252
                cfg_changed = true;
3253
            }
3254
          if (!deleted)
3255
            gsi_next (&gsi);
3256
        }
3257
    }
3258
 
3259
  return cfg_changed;
3260
}
3261
 
3262
/* Generate statements initializing scalar replacements of parts of function
3263
   parameters.  */
3264
 
3265
static void
3266
initialize_parameter_reductions (void)
3267
{
3268
  gimple_stmt_iterator gsi;
3269
  gimple_seq seq = NULL;
3270
  tree parm;
3271
 
3272
  for (parm = DECL_ARGUMENTS (current_function_decl);
3273
       parm;
3274
       parm = DECL_CHAIN (parm))
3275
    {
3276
      VEC (access_p, heap) *access_vec;
3277
      struct access *access;
3278
 
3279
      if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3280
        continue;
3281
      access_vec = get_base_access_vector (parm);
3282
      if (!access_vec)
3283
        continue;
3284
 
3285
      if (!seq)
3286
        {
3287
          seq = gimple_seq_alloc ();
3288
          gsi = gsi_start (seq);
3289
        }
3290
 
3291
      for (access = VEC_index (access_p, access_vec, 0);
3292
           access;
3293
           access = access->next_grp)
3294
        generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true,
3295
                                 EXPR_LOCATION (parm));
3296
    }
3297
 
3298
  if (seq)
3299
    gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR), seq);
3300
}
3301
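
/* Editorial aside, not part of the original tree-sra.c: for a scalarized
   aggregate parameter such as P below, the function above emits copies on
   the single successor edge of the entry block that initialize the
   replacements from the incoming parameter, conceptually
   p$x = p.x; p$y = p.y;.  Hypothetical names; a minimal sketch.  */

struct parm_s { int x; int y; };

static int
sum_parm (struct parm_s p)
{
  return p.x + p.y;     /* replacements of P are initialized at entry */
}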
 
3302
/* The "main" function of intraprocedural SRA passes.  Runs the analysis and if
3303
   it reveals there are components of some aggregates to be scalarized, it runs
3304
   the required transformations.  */
3305
static unsigned int
3306
perform_intra_sra (void)
3307
{
3308
  int ret = 0;
3309
  sra_initialize ();
3310
 
3311
  if (!find_var_candidates ())
3312
    goto out;
3313
 
3314
  if (!scan_function ())
3315
    goto out;
3316
 
3317
  if (!analyze_all_variable_accesses ())
3318
    goto out;
3319
 
3320
  if (sra_modify_function_body ())
3321
    ret = TODO_update_ssa | TODO_cleanup_cfg;
3322
  else
3323
    ret = TODO_update_ssa;
3324
  initialize_parameter_reductions ();
3325
 
3326
  statistics_counter_event (cfun, "Scalar replacements created",
3327
                            sra_stats.replacements);
3328
  statistics_counter_event (cfun, "Modified expressions", sra_stats.exprs);
3329
  statistics_counter_event (cfun, "Subtree copy stmts",
3330
                            sra_stats.subtree_copies);
3331
  statistics_counter_event (cfun, "Subreplacement stmts",
3332
                            sra_stats.subreplacements);
3333
  statistics_counter_event (cfun, "Deleted stmts", sra_stats.deleted);
3334
  statistics_counter_event (cfun, "Separate LHS and RHS handling",
3335
                            sra_stats.separate_lhs_rhs_handling);
3336
 
3337
 out:
3338
  sra_deinitialize ();
3339
  return ret;
3340
}
3341
 
3342
/* Perform early intraprocedural SRA.  */
3343
static unsigned int
3344
early_intra_sra (void)
3345
{
3346
  sra_mode = SRA_MODE_EARLY_INTRA;
3347
  return perform_intra_sra ();
3348
}
3349
 
3350
/* Perform "late" intraprocedural SRA.  */
3351
static unsigned int
3352
late_intra_sra (void)
3353
{
3354
  sra_mode = SRA_MODE_INTRA;
3355
  return perform_intra_sra ();
3356
}
3357
 
3358
 
3359
static bool
3360
gate_intra_sra (void)
3361
{
3362
  return flag_tree_sra != 0 && dbg_cnt (tree_sra);
3363
}
3364
 
3365
 
3366
struct gimple_opt_pass pass_sra_early =
3367
{
3368
 {
3369
  GIMPLE_PASS,
3370
  "esra",                               /* name */
3371
  gate_intra_sra,                       /* gate */
3372
  early_intra_sra,                      /* execute */
3373
  NULL,                                 /* sub */
3374
  NULL,                                 /* next */
3375
  0,                                     /* static_pass_number */
3376
  TV_TREE_SRA,                          /* tv_id */
3377
  PROP_cfg | PROP_ssa,                  /* properties_required */
3378
  0,                                     /* properties_provided */
3379
  0,                                     /* properties_destroyed */
3380
  0,                                     /* todo_flags_start */
3381
  TODO_update_ssa
3382
  | TODO_ggc_collect
3383
  | TODO_verify_ssa                     /* todo_flags_finish */
3384
 }
3385
};
3386
 
3387
struct gimple_opt_pass pass_sra =
3388
{
3389
 {
3390
  GIMPLE_PASS,
3391
  "sra",                                /* name */
3392
  gate_intra_sra,                       /* gate */
3393
  late_intra_sra,                       /* execute */
3394
  NULL,                                 /* sub */
3395
  NULL,                                 /* next */
3396
  0,                                     /* static_pass_number */
3397
  TV_TREE_SRA,                          /* tv_id */
3398
  PROP_cfg | PROP_ssa,                  /* properties_required */
3399
  0,                                     /* properties_provided */
3400
  0,                                     /* properties_destroyed */
3401
  TODO_update_address_taken,            /* todo_flags_start */
3402
  TODO_update_ssa
3403
  | TODO_ggc_collect
3404
  | TODO_verify_ssa                     /* todo_flags_finish */
3405
 }
3406
};
3407
 
3408
 
3409
/* Return true iff PARM (which must be a parm_decl) is an unused scalar
3410
   parameter.  */
3411
 
3412
static bool
3413
is_unused_scalar_param (tree parm)
3414
{
3415
  tree name;
3416
  return (is_gimple_reg (parm)
3417
          && (!(name = gimple_default_def (cfun, parm))
3418
              || has_zero_uses (name)));
3419
}
3420
 
3421
/* Scan immediate uses of a default definition SSA name of a parameter PARM and
3422
   examine whether there are any direct or otherwise infeasible ones.  If so,
3423
   return true, otherwise return false.  PARM must be a gimple register with a
3424
   non-NULL default definition.  */
3425
 
3426
static bool
3427
ptr_parm_has_direct_uses (tree parm)
3428
{
3429
  imm_use_iterator ui;
3430
  gimple stmt;
3431
  tree name = gimple_default_def (cfun, parm);
3432
  bool ret = false;
3433
 
3434
  FOR_EACH_IMM_USE_STMT (stmt, ui, name)
3435
    {
3436
      int uses_ok = 0;
3437
      use_operand_p use_p;
3438
 
3439
      if (is_gimple_debug (stmt))
3440
        continue;
3441
 
3442
      /* Valid uses include dereferences on the lhs and the rhs.  */
3443
      if (gimple_has_lhs (stmt))
3444
        {
3445
          tree lhs = gimple_get_lhs (stmt);
3446
          while (handled_component_p (lhs))
3447
            lhs = TREE_OPERAND (lhs, 0);
3448
          if (TREE_CODE (lhs) == MEM_REF
3449
              && TREE_OPERAND (lhs, 0) == name
3450
              && integer_zerop (TREE_OPERAND (lhs, 1))
3451
              && types_compatible_p (TREE_TYPE (lhs),
3452
                                     TREE_TYPE (TREE_TYPE (name)))
3453
              && !TREE_THIS_VOLATILE (lhs))
3454
            uses_ok++;
3455
        }
3456
      if (gimple_assign_single_p (stmt))
3457
        {
3458
          tree rhs = gimple_assign_rhs1 (stmt);
3459
          while (handled_component_p (rhs))
3460
            rhs = TREE_OPERAND (rhs, 0);
3461
          if (TREE_CODE (rhs) == MEM_REF
3462
              && TREE_OPERAND (rhs, 0) == name
3463
              && integer_zerop (TREE_OPERAND (rhs, 1))
3464
              && types_compatible_p (TREE_TYPE (rhs),
3465
                                     TREE_TYPE (TREE_TYPE (name)))
3466
              && !TREE_THIS_VOLATILE (rhs))
3467
            uses_ok++;
3468
        }
3469
      else if (is_gimple_call (stmt))
3470
        {
3471
          unsigned i;
3472
          for (i = 0; i < gimple_call_num_args (stmt); ++i)
3473
            {
3474
              tree arg = gimple_call_arg (stmt, i);
3475
              while (handled_component_p (arg))
3476
                arg = TREE_OPERAND (arg, 0);
3477
              if (TREE_CODE (arg) == MEM_REF
3478
                  && TREE_OPERAND (arg, 0) == name
3479
                  && integer_zerop (TREE_OPERAND (arg, 1))
3480
                  && types_compatible_p (TREE_TYPE (arg),
3481
                                         TREE_TYPE (TREE_TYPE (name)))
3482
                  && !TREE_THIS_VOLATILE (arg))
3483
                uses_ok++;
3484
            }
3485
        }
3486
 
3487
      /* If the number of valid uses does not match the number of
3488
         uses in this stmt there is an unhandled use.  */
3489
      FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
3490
        --uses_ok;
3491
 
3492
      if (uses_ok != 0)
3493
        ret = true;
3494
 
3495
      if (ret)
3496
        BREAK_FROM_IMM_USE_STMT (ui);
3497
    }
3498
 
3499
  return ret;
3500
}
3501
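
/* Editorial aside, not part of the original tree-sra.c: a source-level
   view of the distinction drawn above.  Plain dereferences such as *p
   are the uses counted in USES_OK; letting the pointer value itself
   escape, as in PASSES_PTR, leaves an unhandled use and makes
   ptr_parm_has_direct_uses return true.  Hypothetical names; a minimal
   sketch.  */

extern void sink (int *);

static int
derefs_only (int *p)
{
  return *p;            /* a simple dereference: counted in USES_OK */
}

static void
passes_ptr (int *p)
{
  sink (p);             /* the pointer value escapes: a direct use */
}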
 
3502
/* Identify candidates for reduction for IPA-SRA based on their type and mark
3503
   them in candidate_bitmap.  Note that these do not necessarily include
3504
   parameters which are unused and thus can be removed.  Return true iff any
3505
   such candidate has been found.  */
3506
 
3507
static bool
3508
find_param_candidates (void)
3509
{
3510
  tree parm;
3511
  int count = 0;
3512
  bool ret = false;
3513
  const char *msg;
3514
 
3515
  for (parm = DECL_ARGUMENTS (current_function_decl);
3516
       parm;
3517
       parm = DECL_CHAIN (parm))
3518
    {
3519
      tree type = TREE_TYPE (parm);
3520
 
3521
      count++;
3522
 
3523
      if (TREE_THIS_VOLATILE (parm)
3524
          || TREE_ADDRESSABLE (parm)
3525
          || (!is_gimple_reg_type (type) && is_va_list_type (type)))
3526
        continue;
3527
 
3528
      if (is_unused_scalar_param (parm))
3529
        {
3530
          ret = true;
3531
          continue;
3532
        }
3533
 
3534
      if (POINTER_TYPE_P (type))
3535
        {
3536
          type = TREE_TYPE (type);
3537
 
3538
          if (TREE_CODE (type) == FUNCTION_TYPE
3539
              || TYPE_VOLATILE (type)
3540
              || (TREE_CODE (type) == ARRAY_TYPE
3541
                  && TYPE_NONALIASED_COMPONENT (type))
3542
              || !is_gimple_reg (parm)
3543
              || is_va_list_type (type)
3544
              || ptr_parm_has_direct_uses (parm))
3545
            continue;
3546
        }
3547
      else if (!AGGREGATE_TYPE_P (type))
3548
        continue;
3549
 
3550
      if (!COMPLETE_TYPE_P (type)
3551
          || !host_integerp (TYPE_SIZE (type), 1)
3552
          || tree_low_cst (TYPE_SIZE (type), 1) == 0
3553
          || (AGGREGATE_TYPE_P (type)
3554
              && type_internals_preclude_sra_p (type, &msg)))
3555
        continue;
3556
 
3557
      bitmap_set_bit (candidate_bitmap, DECL_UID (parm));
3558
      ret = true;
3559
      if (dump_file && (dump_flags & TDF_DETAILS))
3560
        {
3561
          fprintf (dump_file, "Candidate (%d): ", DECL_UID (parm));
3562
          print_generic_expr (dump_file, parm, 0);
3563
          fprintf (dump_file, "\n");
3564
        }
3565
    }
3566
 
3567
  func_param_count = count;
3568
  return ret;
3569
}
3570
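
/* Editorial aside, not part of the original tree-sra.c: hypothetical
   source-level instances of the filters above.  C is a small complete
   aggregate and would typically survive the checks, whereas V is skipped
   for TREE_THIS_VOLATILE and FN because its pointee is a FUNCTION_TYPE;
   a sketch under these assumptions only.  */

struct cand_s { int a; int b; };

static int param_candidate (struct cand_s c) { return c.a; }    /* kept */
static int rejected_volatile (volatile int v) { return v; }     /* skipped */
static int rejected_fnptr (int (*fn) (void)) { return fn (); }  /* skipped */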
 
3571
/* Callback of walk_aliased_vdefs, marks the access passed as DATA as
3572
   maybe_modified. */
3573
 
3574
static bool
3575
mark_maybe_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
3576
                     void *data)
3577
{
3578
  struct access *repr = (struct access *) data;
3579
 
3580
  repr->grp_maybe_modified = 1;
3581
  return true;
3582
}
3583
 
3584
/* Analyze what representatives (in linked lists accessible from
3585
   REPRESENTATIVES) can be modified by side effects of statements in the
3586
   current function.  */
3587
 
3588
static void
3589
analyze_modified_params (VEC (access_p, heap) *representatives)
3590
{
3591
  int i;
3592
 
3593
  for (i = 0; i < func_param_count; i++)
3594
    {
3595
      struct access *repr;
3596
 
3597
      for (repr = VEC_index (access_p, representatives, i);
3598
           repr;
3599
           repr = repr->next_grp)
3600
        {
3601
          struct access *access;
3602
          bitmap visited;
3603
          ao_ref ar;
3604
 
3605
          if (no_accesses_p (repr))
3606
            continue;
3607
          if (!POINTER_TYPE_P (TREE_TYPE (repr->base))
3608
              || repr->grp_maybe_modified)
3609
            continue;
3610
 
3611
          ao_ref_init (&ar, repr->expr);
3612
          visited = BITMAP_ALLOC (NULL);
3613
          for (access = repr; access; access = access->next_sibling)
3614
            {
3615
              /* All accesses are read ones, otherwise grp_maybe_modified would
3616
                 be trivially set.  */
3617
              walk_aliased_vdefs (&ar, gimple_vuse (access->stmt),
3618
                                  mark_maybe_modified, repr, &visited);
3619
              if (repr->grp_maybe_modified)
3620
                break;
3621
            }
3622
          BITMAP_FREE (visited);
3623
        }
3624
    }
3625
}
3626
 
3627
/* Propagate distances in bb_dereferences in the opposite direction than the
3628
   control flow edges, in each step storing the maximum of the current value
3629
   and the minimum of all successors.  These steps are repeated until the table
3630
   stabilizes.  Note that BBs which might terminate the function (according to
3631
   the final_bbs bitmap) are never updated in this way.  */
3632
 
3633
static void
3634
propagate_dereference_distances (void)
3635
{
3636
  VEC (basic_block, heap) *queue;
3637
  basic_block bb;
3638
 
3639
  queue = VEC_alloc (basic_block, heap, last_basic_block_for_function (cfun));
3640
  VEC_quick_push (basic_block, queue, ENTRY_BLOCK_PTR);
3641
  FOR_EACH_BB (bb)
3642
    {
3643
      VEC_quick_push (basic_block, queue, bb);
3644
      bb->aux = bb;
3645
    }
3646
 
3647
  while (!VEC_empty (basic_block, queue))
3648
    {
3649
      edge_iterator ei;
3650
      edge e;
3651
      bool change = false;
3652
      int i;
3653
 
3654
      bb = VEC_pop (basic_block, queue);
3655
      bb->aux = NULL;
3656
 
3657
      if (bitmap_bit_p (final_bbs, bb->index))
3658
        continue;
3659
 
3660
      for (i = 0; i < func_param_count; i++)
3661
        {
3662
          int idx = bb->index * func_param_count + i;
3663
          bool first = true;
3664
          HOST_WIDE_INT inh = 0;
3665
 
3666
          FOR_EACH_EDGE (e, ei, bb->succs)
3667
          {
3668
            int succ_idx = e->dest->index * func_param_count + i;
3669
 
3670
            if (e->src == EXIT_BLOCK_PTR)
3671
              continue;
3672
 
3673
            if (first)
3674
              {
3675
                first = false;
3676
                inh = bb_dereferences[succ_idx];
3677
              }
3678
            else if (bb_dereferences[succ_idx] < inh)
3679
              inh = bb_dereferences[succ_idx];
3680
          }
3681
 
3682
          if (!first && bb_dereferences[idx] < inh)
3683
            {
3684
              bb_dereferences[idx] = inh;
3685
              change = true;
3686
            }
3687
        }
3688
 
3689
      if (change && !bitmap_bit_p (final_bbs, bb->index))
3690
        FOR_EACH_EDGE (e, ei, bb->preds)
3691
          {
3692
            if (e->src->aux)
3693
              continue;
3694
 
3695
            e->src->aux = e->src;
3696
            VEC_quick_push (basic_block, queue, e->src);
3697
          }
3698
    }
3699
 
3700
  VEC_free (basic_block, heap, queue);
3701
}
3702
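
/* Editorial aside, not part of the original tree-sra.c: the source-level
   meaning of the propagation above.  In BOTH_BRANCHES the dereference of
   P happens on every path, so the distance propagated back to the entry
   block covers it and a caller could safely dereference P instead; in
   ONE_BRANCH it does not, so the representative would end up marked
   grp_not_necessarilly_dereferenced.  Hypothetical names; a minimal
   sketch.  */

static int
both_branches (int *p, int c)
{
  if (c)
    return *p + 1;      /* dereference on the then-path ... */
  return *p - 1;        /* ... and on the else-path as well */
}

static int
one_branch (int *p, int c)
{
  if (c)
    return *p;          /* dereferenced only when C is nonzero */
  return 0;
}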
 
3703
/* Dump a dereferences TABLE with heading STR to file F.  */
3704
 
3705
static void
3706
dump_dereferences_table (FILE *f, const char *str, HOST_WIDE_INT *table)
3707
{
3708
  basic_block bb;
3709
 
3710
  fprintf (f, "%s", str);
3711
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
3712
    {
3713
      fprintf (f, "%4i  %i   ", bb->index, bitmap_bit_p (final_bbs, bb->index));
3714
      if (bb != EXIT_BLOCK_PTR)
3715
        {
3716
          int i;
3717
          for (i = 0; i < func_param_count; i++)
3718
            {
3719
              int idx = bb->index * func_param_count + i;
3720
              fprintf (f, " %4" HOST_WIDE_INT_PRINT "d", table[idx]);
3721
            }
3722
        }
3723
      fprintf (f, "\n");
3724
    }
3725
  fprintf (f, "\n");
3726
}
3727
 
3728
/* Determine which (parts of) parameters passed by reference that are not
3729
   assigned to are not certainly dereferenced in this function and thus the
3730
   dereferencing cannot be safely moved to the caller without potentially
3731
   introducing a segfault.  Mark such REPRESENTATIVES as
3732
   grp_not_necessarilly_dereferenced.
3733
 
3734
   The maximum dereferenced "distance," i.e. the offset + size of the accessed
3735
   part, is calculated rather than a simple boolean for each
3736
   pointer parameter, to handle cases when only a fraction of the whole
3737
   aggregate is allocated (see testsuite/gcc.c-torture/execute/ipa-sra-2.c for
3738
   an example).
3739
 
3740
   The maximum dereference distances for each pointer parameter and BB are
3741
   already stored in bb_dereferences.  This routine simply propagates these
3742
   values upwards by propagate_dereference_distances and then compares the
3743
   distances of individual parameters in the ENTRY BB to the equivalent
3744
   distances of each representative of a (fraction of a) parameter.  */
3745
 
3746
static void
3747
analyze_caller_dereference_legality (VEC (access_p, heap) *representatives)
3748
{
3749
  int i;
3750
 
3751
  if (dump_file && (dump_flags & TDF_DETAILS))
3752
    dump_dereferences_table (dump_file,
3753
                             "Dereference table before propagation:\n",
3754
                             bb_dereferences);
3755
 
3756
  propagate_dereference_distances ();
3757
 
3758
  if (dump_file && (dump_flags & TDF_DETAILS))
3759
    dump_dereferences_table (dump_file,
3760
                             "Dereference table after propagation:\n",
3761
                             bb_dereferences);
3762
 
3763
  for (i = 0; i < func_param_count; i++)
3764
    {
3765
      struct access *repr = VEC_index (access_p, representatives, i);
3766
      int idx = ENTRY_BLOCK_PTR->index * func_param_count + i;
3767
 
3768
      if (!repr || no_accesses_p (repr))
3769
        continue;
3770
 
3771
      do
3772
        {
3773
          if ((repr->offset + repr->size) > bb_dereferences[idx])
3774
            repr->grp_not_necessarilly_dereferenced = 1;
3775
          repr = repr->next_grp;
3776
        }
3777
      while (repr);
3778
    }
3779
}
3780
 
3781
/* Return the representative access for the parameter declaration PARM if it is
3782
   a scalar passed by reference which is not written to and the pointer value
3783
   is not used directly.  Thus, if it is legal to dereference it in the caller
3784
   and we can rule out modifications through aliases, such a parameter should be
3785
   turned into one passed by value.  Return NULL otherwise.  */
3786
 
3787
static struct access *
3788
unmodified_by_ref_scalar_representative (tree parm)
3789
{
3790
  int i, access_count;
3791
  struct access *repr;
3792
  VEC (access_p, heap) *access_vec;
3793
 
3794
  access_vec = get_base_access_vector (parm);
3795
  gcc_assert (access_vec);
3796
  repr = VEC_index (access_p, access_vec, 0);
3797
  if (repr->write)
3798
    return NULL;
3799
  repr->group_representative = repr;
3800
 
3801
  access_count = VEC_length (access_p, access_vec);
3802
  for (i = 1; i < access_count; i++)
3803
    {
3804
      struct access *access = VEC_index (access_p, access_vec, i);
3805
      if (access->write)
3806
        return NULL;
3807
      access->group_representative = repr;
3808
      access->next_sibling = repr->next_sibling;
3809
      repr->next_sibling = access;
3810
    }
3811
 
3812
  repr->grp_read = 1;
3813
  repr->grp_scalar_ptr = 1;
3814
  return repr;
3815
}
3816
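
/* Editorial aside, not part of the original tree-sra.c: the case
   recognized above is a read-only scalar passed by reference, like P
   below.  If the follow-up analyses rule out indirect modification and
   show that the dereference is safe to move to callers, IPA-SRA turns P
   into a by-value parameter (counted in
   sra_stats.scalar_by_ref_to_by_val).  A minimal sketch.  */

static int
read_only_ref (const int *p)
{
  return *p * 2;        /* reads *p only; the pointer never escapes */
}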
 
3817
/* Return true iff this access precludes IPA-SRA of the parameter it is
3818
   associated with. */
3819
 
3820
static bool
3821
access_precludes_ipa_sra_p (struct access *access)
3822
{
3823
  /* Avoid issues such as the second simple testcase in PR 42025.  The problem
3824
     is an incompatible assignment in a call statement (and possibly even in asm
3825
     statements).  This can be relaxed by using a new temporary but only for
3826
     non-TREE_ADDRESSABLE types and is probably not worth the complexity. (In
3827
     intraprocedural SRA we deal with this by keeping the old aggregate around,
3828
     something we cannot do in IPA-SRA.)  */
3829
  if (access->write
3830
      && (is_gimple_call (access->stmt)
3831
          || gimple_code (access->stmt) == GIMPLE_ASM))
3832
    return true;
3833
 
3834
  if (STRICT_ALIGNMENT
3835
      && tree_non_aligned_mem_p (access->expr, TYPE_ALIGN (access->type)))
3836
    return true;
3837
 
3838
  return false;
3839
}
3840
 
3841
 
3842
/* Sort collected accesses for parameter PARM, identify representatives for
3843
   each accessed region and link them together.  Return NULL if there are
3844
   different but overlapping accesses, return the special ptr value meaning
3845
   there are no accesses for this parameter if that is the case, and return the
3846
   first representative otherwise.  Set *RO_GRP if there is a group of accesses
3847
   with only read (i.e. no write) accesses.  */
3848
 
3849
static struct access *
3850
splice_param_accesses (tree parm, bool *ro_grp)
3851
{
3852
  int i, j, access_count, group_count;
3853
  int agg_size, total_size = 0;
3854
  struct access *access, *res, **prev_acc_ptr = &res;
3855
  VEC (access_p, heap) *access_vec;
3856
 
3857
  access_vec = get_base_access_vector (parm);
3858
  if (!access_vec)
3859
    return &no_accesses_representant;
3860
  access_count = VEC_length (access_p, access_vec);
3861
 
3862
  VEC_qsort (access_p, access_vec, compare_access_positions);
3863
 
3864
  i = 0;
3865
  total_size = 0;
3866
  group_count = 0;
3867
  while (i < access_count)
3868
    {
3869
      bool modification;
3870
      tree a1_alias_type;
3871
      access = VEC_index (access_p, access_vec, i);
3872
      modification = access->write;
3873
      if (access_precludes_ipa_sra_p (access))
3874
        return NULL;
3875
      a1_alias_type = reference_alias_ptr_type (access->expr);
3876
 
3877
      /* Access is about to become group representative unless we find some
3878
         nasty overlap which would preclude us from breaking this parameter
3879
         apart. */
3880
 
3881
      j = i + 1;
3882
      while (j < access_count)
3883
        {
3884
          struct access *ac2 = VEC_index (access_p, access_vec, j);
3885
          if (ac2->offset != access->offset)
3886
            {
3887
              /* All or nothing law for parameters. */
3888
              if (access->offset + access->size > ac2->offset)
3889
                return NULL;
3890
              else
3891
                break;
3892
            }
3893
          else if (ac2->size != access->size)
3894
            return NULL;
3895
 
3896
          if (access_precludes_ipa_sra_p (ac2)
3897
              || (ac2->type != access->type
3898
                  && (TREE_ADDRESSABLE (ac2->type)
3899
                      || TREE_ADDRESSABLE (access->type)))
3900
              || (reference_alias_ptr_type (ac2->expr) != a1_alias_type))
3901
            return NULL;
3902
 
3903
          modification |= ac2->write;
3904
          ac2->group_representative = access;
3905
          ac2->next_sibling = access->next_sibling;
3906
          access->next_sibling = ac2;
3907
          j++;
3908
        }
3909
 
3910
      group_count++;
3911
      access->grp_maybe_modified = modification;
3912
      if (!modification)
3913
        *ro_grp = true;
3914
      *prev_acc_ptr = access;
3915
      prev_acc_ptr = &access->next_grp;
3916
      total_size += access->size;
3917
      i = j;
3918
    }
3919
 
3920
  if (POINTER_TYPE_P (TREE_TYPE (parm)))
3921
    agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
3922
  else
3923
    agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
3924
  if (total_size >= agg_size)
3925
    return NULL;
3926
 
3927
  gcc_assert (group_count > 0);
3928
  return res;
3929
}
3930
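
/* Editorial aside, not part of the original tree-sra.c: an instance of
   the "all or nothing law" above.  In OVERLAPPING below, u.i and u.s.lo
   both start at offset 0 but differ in size, so splice_param_accesses
   would return NULL and the parameter is left untouched.  Hypothetical
   names; a minimal sketch.  */

struct halves_s { int lo; int hi; };
union ovl_u { long long i; struct halves_s s; };

static long long
overlapping (union ovl_u u)
{
  return u.i + u.s.lo;  /* same offset, different sizes: not split */
}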
 
3931
/* Decide whether parameters with representative accesses given by REPR should
3932
   be reduced into components.  */
3933
 
3934
static int
3935
decide_one_param_reduction (struct access *repr)
3936
{
3937
  int total_size, cur_parm_size, agg_size, new_param_count, parm_size_limit;
3938
  bool by_ref;
3939
  tree parm;
3940
 
3941
  parm = repr->base;
3942
  cur_parm_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
3943
  gcc_assert (cur_parm_size > 0);
3944
 
3945
  if (POINTER_TYPE_P (TREE_TYPE (parm)))
3946
    {
3947
      by_ref = true;
3948
      agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
3949
    }
3950
  else
3951
    {
3952
      by_ref = false;
3953
      agg_size = cur_parm_size;
3954
    }
3955
 
3956
  if (dump_file)
3957
    {
3958
      struct access *acc;
3959
      fprintf (dump_file, "Evaluating PARAM group sizes for ");
3960
      print_generic_expr (dump_file, parm, 0);
3961
      fprintf (dump_file, " (UID: %u): \n", DECL_UID (parm));
3962
      for (acc = repr; acc; acc = acc->next_grp)
3963
        dump_access (dump_file, acc, true);
3964
    }
3965
 
3966
  total_size = 0;
3967
  new_param_count = 0;
3968
 
3969
  for (; repr; repr = repr->next_grp)
3970
    {
3971
      gcc_assert (parm == repr->base);
3972
 
3973
      /* Taking the address of a non-addressable field is verboten.  */
3974
      if (by_ref && repr->non_addressable)
3975
        return 0;
3976
 
3977
      /* Do not decompose a non-BLKmode param in a way that would
3978
         create BLKmode params.  Especially for by-reference passing
3979
         (thus, pointer-type param) this is hardly worthwhile.  */
3980
      if (DECL_MODE (parm) != BLKmode
3981
          && TYPE_MODE (repr->type) == BLKmode)
3982
        return 0;
3983
 
3984
      if (!by_ref || (!repr->grp_maybe_modified
3985
                      && !repr->grp_not_necessarilly_dereferenced))
3986
        total_size += repr->size;
3987
      else
3988
        total_size += cur_parm_size;
3989
 
3990
      new_param_count++;
3991
    }
3992
 
3993
  gcc_assert (new_param_count > 0);
3994
 
3995
  if (optimize_function_for_size_p (cfun))
3996
    parm_size_limit = cur_parm_size;
3997
  else
3998
    parm_size_limit = (PARAM_VALUE (PARAM_IPA_SRA_PTR_GROWTH_FACTOR)
3999
                       * cur_parm_size);
4000
 
4001
  if (total_size < agg_size
4002
      && total_size <= parm_size_limit)
4003
    {
4004
      if (dump_file)
4005
        fprintf (dump_file, "    ....will be split into %i components\n",
4006
                 new_param_count);
4007
      return new_param_count;
4008
    }
4009
  else
4010
    return 0;
4011
}
4012
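
/* Editorial aside, not part of the original tree-sra.c: a worked
   instance of the size test above, assuming a 64-bit target and the
   default --param ipa-sra-ptr-growth-factor of 2 (an assumption about
   the build's params.def).  For a pointer parameter (cur_parm_size = 64
   bits) to a 256-bit struct of which only two unmodified, certainly
   dereferenced 32-bit fields are accessed: total_size = 32 + 32 = 64,
   agg_size = 256 and parm_size_limit = 2 * 64 = 128.  Since 64 < 256 and
   64 <= 128, the function returns new_param_count = 2 and the parameter
   is split into two components.  */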
 
4013
/* The order of the following enums is important; we need to do extra work for
4014
   UNUSED_PARAMS, BY_VAL_ACCESSES and UNMODIF_BY_REF_ACCESSES.  */
4015
enum ipa_splicing_result { NO_GOOD_ACCESS, UNUSED_PARAMS, BY_VAL_ACCESSES,
4016
                          MODIF_BY_REF_ACCESSES, UNMODIF_BY_REF_ACCESSES };
4017
 
4018
/* Identify representatives of all accesses to all candidate parameters for
4019
   IPA-SRA.  Return a result based on what representatives have been found. */
4020
 
4021
static enum ipa_splicing_result
4022
splice_all_param_accesses (VEC (access_p, heap) **representatives)
4023
{
4024
  enum ipa_splicing_result result = NO_GOOD_ACCESS;
4025
  tree parm;
4026
  struct access *repr;
4027
 
4028
  *representatives = VEC_alloc (access_p, heap, func_param_count);
4029
 
4030
  for (parm = DECL_ARGUMENTS (current_function_decl);
4031
       parm;
4032
       parm = DECL_CHAIN (parm))
4033
    {
4034
      if (is_unused_scalar_param (parm))
4035
        {
4036
          VEC_quick_push (access_p, *representatives,
4037
                          &no_accesses_representant);
4038
          if (result == NO_GOOD_ACCESS)
4039
            result = UNUSED_PARAMS;
4040
        }
4041
      else if (POINTER_TYPE_P (TREE_TYPE (parm))
4042
               && is_gimple_reg_type (TREE_TYPE (TREE_TYPE (parm)))
4043
               && bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
4044
        {
4045
          repr = unmodified_by_ref_scalar_representative (parm);
4046
          VEC_quick_push (access_p, *representatives, repr);
4047
          if (repr)
4048
            result = UNMODIF_BY_REF_ACCESSES;
4049
        }
4050
      else if (bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
4051
        {
4052
          bool ro_grp = false;
4053
          repr = splice_param_accesses (parm, &ro_grp);
4054
          VEC_quick_push (access_p, *representatives, repr);
4055
 
4056
          if (repr && !no_accesses_p (repr))
4057
            {
4058
              if (POINTER_TYPE_P (TREE_TYPE (parm)))
4059
                {
4060
                  if (ro_grp)
4061
                    result = UNMODIF_BY_REF_ACCESSES;
4062
                  else if (result < MODIF_BY_REF_ACCESSES)
4063
                    result = MODIF_BY_REF_ACCESSES;
4064
                }
4065
              else if (result < BY_VAL_ACCESSES)
4066
                result = BY_VAL_ACCESSES;
4067
            }
4068
          else if (no_accesses_p (repr) && (result == NO_GOOD_ACCESS))
4069
            result = UNUSED_PARAMS;
4070
        }
4071
      else
4072
        VEC_quick_push (access_p, *representatives, NULL);
4073
    }
4074
 
4075
  if (result == NO_GOOD_ACCESS)
4076
    {
4077
      VEC_free (access_p, heap, *representatives);
4078
      *representatives = NULL;
4079
      return NO_GOOD_ACCESS;
4080
    }
4081
 
4082
  return result;
4083
}
4084
 
4085
/* Return the index of BASE in PARMS.  Abort if it is not found.  */
4086
 
4087
static inline int
4088
get_param_index (tree base, VEC(tree, heap) *parms)
4089
{
4090
  int i, len;
4091
 
4092
  len = VEC_length (tree, parms);
4093
  for (i = 0; i < len; i++)
4094
    if (VEC_index (tree, parms, i) == base)
4095
      return i;
4096
  gcc_unreachable ();
4097
}
4098
 
4099
/* Convert the decisions made at the representative level into compact
4100
   parameter adjustments.  REPRESENTATIVES are pointers to first
4101
   representatives of each param's accesses, ADJUSTMENTS_COUNT is the expected
4102
   final number of adjustments.  */
4103
 
4104
static ipa_parm_adjustment_vec
4105
turn_representatives_into_adjustments (VEC (access_p, heap) *representatives,
4106
                                       int adjustments_count)
4107
{
4108
  VEC (tree, heap) *parms;
4109
  ipa_parm_adjustment_vec adjustments;
4110
  tree parm;
4111
  int i;
4112
 
4113
  gcc_assert (adjustments_count > 0);
4114
  parms = ipa_get_vector_of_formal_parms (current_function_decl);
4115
  adjustments = VEC_alloc (ipa_parm_adjustment_t, heap, adjustments_count);
4116
  parm = DECL_ARGUMENTS (current_function_decl);
4117
  for (i = 0; i < func_param_count; i++, parm = DECL_CHAIN (parm))
4118
    {
4119
      struct access *repr = VEC_index (access_p, representatives, i);
4120
 
4121
      if (!repr || no_accesses_p (repr))
4122
        {
4123
          struct ipa_parm_adjustment *adj;
4124
 
4125
          adj = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
4126
          memset (adj, 0, sizeof (*adj));
4127
          adj->base_index = get_param_index (parm, parms);
4128
          adj->base = parm;
4129
          if (!repr)
4130
            adj->copy_param = 1;
4131
          else
4132
            adj->remove_param = 1;
4133
        }
4134
      else
4135
        {
4136
          struct ipa_parm_adjustment *adj;
4137
          int index = get_param_index (parm, parms);
4138
 
4139
          for (; repr; repr = repr->next_grp)
4140
            {
4141
              adj = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
4142
              memset (adj, 0, sizeof (*adj));
4143
              gcc_assert (repr->base == parm);
4144
              adj->base_index = index;
4145
              adj->base = repr->base;
4146
              adj->type = repr->type;
4147
              adj->alias_ptr_type = reference_alias_ptr_type (repr->expr);
4148
              adj->offset = repr->offset;
4149
              adj->by_ref = (POINTER_TYPE_P (TREE_TYPE (repr->base))
4150
                             && (repr->grp_maybe_modified
4151
                                 || repr->grp_not_necessarilly_dereferenced));
4152
 
4153
            }
4154
        }
4155
    }
4156
  VEC_free (tree, heap, parms);
4157
  return adjustments;
4158
}
4159
 
4160
/* Analyze the collected accesses and produce a plan for what to do with the
4161
   parameters in the form of adjustments, NULL meaning nothing.  */
4162
 
4163
static ipa_parm_adjustment_vec
4164
analyze_all_param_acesses (void)
4165
{
4166
  enum ipa_splicing_result repr_state;
4167
  bool proceed = false;
4168
  int i, adjustments_count = 0;
4169
  VEC (access_p, heap) *representatives;
4170
  ipa_parm_adjustment_vec adjustments;
4171
 
4172
  repr_state = splice_all_param_accesses (&representatives);
4173
  if (repr_state == NO_GOOD_ACCESS)
4174
    return NULL;
4175
 
4176
  /* If there are any parameters passed by reference which are not modified
4177
     directly, we need to check whether they can be modified indirectly.  */
4178
  if (repr_state == UNMODIF_BY_REF_ACCESSES)
4179
    {
4180
      analyze_caller_dereference_legality (representatives);
4181
      analyze_modified_params (representatives);
4182
    }
4183
 
4184
  for (i = 0; i < func_param_count; i++)
4185
    {
4186
      struct access *repr = VEC_index (access_p, representatives, i);
4187
 
4188
      if (repr && !no_accesses_p (repr))
4189
        {
4190
          if (repr->grp_scalar_ptr)
4191
            {
4192
              adjustments_count++;
4193
              if (repr->grp_not_necessarilly_dereferenced
4194
                  || repr->grp_maybe_modified)
4195
                VEC_replace (access_p, representatives, i, NULL);
4196
              else
4197
                {
4198
                  proceed = true;
4199
                  sra_stats.scalar_by_ref_to_by_val++;
4200
                }
4201
            }
4202
          else
4203
            {
4204
              int new_components = decide_one_param_reduction (repr);
4205
 
4206
              if (new_components == 0)
4207
                {
4208
                  VEC_replace (access_p, representatives, i, NULL);
4209
                  adjustments_count++;
4210
                }
4211
              else
4212
                {
4213
                  adjustments_count += new_components;
4214
                  sra_stats.aggregate_params_reduced++;
4215
                  sra_stats.param_reductions_created += new_components;
4216
                  proceed = true;
4217
                }
4218
            }
4219
        }
4220
      else
4221
        {
4222
          if (no_accesses_p (repr))
4223
            {
4224
              proceed = true;
4225
              sra_stats.deleted_unused_parameters++;
4226
            }
4227
          adjustments_count++;
4228
        }
4229
    }
4230
 
4231
  if (!proceed && dump_file)
4232
    fprintf (dump_file, "NOT proceeding to change params.\n");
4233
 
4234
  if (proceed)
4235
    adjustments = turn_representatives_into_adjustments (representatives,
4236
                                                         adjustments_count);
4237
  else
4238
    adjustments = NULL;
4239
 
4240
  VEC_free (access_p, heap, representatives);
4241
  return adjustments;
4242
}
4243
 
4244
/* If a parameter replacement identified by ADJ does not yet exist in the form
4245
   of a declaration, create it and record it; otherwise return the previously
4246
   created one.  */
4247
 
4248
static tree
4249
get_replaced_param_substitute (struct ipa_parm_adjustment *adj)
4250
{
4251
  tree repl;
4252
  if (!adj->new_ssa_base)
4253
    {
4254
      char *pretty_name = make_fancy_name (adj->base);
4255
 
4256
      repl = create_tmp_reg (TREE_TYPE (adj->base), "ISR");
4257
      DECL_NAME (repl) = get_identifier (pretty_name);
4258
      obstack_free (&name_obstack, pretty_name);
4259
 
4260
      add_referenced_var (repl);
4261
      adj->new_ssa_base = repl;
4262
    }
4263
  else
4264
    repl = adj->new_ssa_base;
4265
  return repl;
4266
}
4267
 
4268
/* Find the first adjustment for a particular parameter BASE in a vector of
4269
   ADJUSTMENTS which is not a copy_param.  Return NULL if there is no such
4270
   adjustment. */
4271
 
4272
static struct ipa_parm_adjustment *
4273
get_adjustment_for_base (ipa_parm_adjustment_vec adjustments, tree base)
4274
{
4275
  int i, len;
4276
 
4277
  len = VEC_length (ipa_parm_adjustment_t, adjustments);
4278
  for (i = 0; i < len; i++)
4279
    {
4280
      struct ipa_parm_adjustment *adj;
4281
 
4282
      adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
4283
      if (!adj->copy_param && adj->base == base)
4284
        return adj;
4285
    }
4286
 
4287
  return NULL;
4288
}
4289
 
4290
/* If the statement STMT defines an SSA_NAME of a parameter which is to be
4291
   removed because its value is not used, replace the SSA_NAME with one
4292
   relating to a created VAR_DECL, together with all of its uses, and return true.
4293
   ADJUSTMENTS is a pointer to an adjustments vector.  */
4294
 
4295
static bool
4296
replace_removed_params_ssa_names (gimple stmt,
4297
                                  ipa_parm_adjustment_vec adjustments)
4298
{
4299
  struct ipa_parm_adjustment *adj;
4300
  tree lhs, decl, repl, name;
4301
 
4302
  if (gimple_code (stmt) == GIMPLE_PHI)
4303
    lhs = gimple_phi_result (stmt);
4304
  else if (is_gimple_assign (stmt))
4305
    lhs = gimple_assign_lhs (stmt);
4306
  else if (is_gimple_call (stmt))
4307
    lhs = gimple_call_lhs (stmt);
4308
  else
4309
    gcc_unreachable ();
4310
 
4311
  if (TREE_CODE (lhs) != SSA_NAME)
4312
    return false;
4313
  decl = SSA_NAME_VAR (lhs);
4314
  if (TREE_CODE (decl) != PARM_DECL)
4315
    return false;
4316
 
4317
  adj = get_adjustment_for_base (adjustments, decl);
4318
  if (!adj)
4319
    return false;
4320
 
4321
  repl = get_replaced_param_substitute (adj);
4322
  name = make_ssa_name (repl, stmt);
4323
 
4324
  if (dump_file)
4325
    {
4326
      fprintf (dump_file, "replacing an SSA name of a removed param ");
4327
      print_generic_expr (dump_file, lhs, 0);
4328
      fprintf (dump_file, " with ");
4329
      print_generic_expr (dump_file, name, 0);
4330
      fprintf (dump_file, "\n");
4331
    }
4332
 
4333
  if (is_gimple_assign (stmt))
4334
    gimple_assign_set_lhs (stmt, name);
4335
  else if (is_gimple_call (stmt))
4336
    gimple_call_set_lhs (stmt, name);
4337
  else
4338
    gimple_phi_set_result (stmt, name);
4339
 
4340
  replace_uses_by (lhs, name);
4341
  release_ssa_name (lhs);
4342
  return true;
4343
}
4344
 
4345
/* If the expression *EXPR should be replaced by a reduction of a parameter, do
4346
   so.  ADJUSTMENTS is a pointer to a vector of adjustments.  CONVERT
4347
   specifies whether the function should care about type incompatibility the
4348
   current and new expressions.  If it is false, the function will leave
4349
   incompatibility issues to the caller.  Return true iff the expression
4350
   was modified. */
4351
 
4352
static bool
4353
sra_ipa_modify_expr (tree *expr, bool convert,
4354
                     ipa_parm_adjustment_vec adjustments)
4355
{
4356
  int i, len;
4357
  struct ipa_parm_adjustment *adj, *cand = NULL;
4358
  HOST_WIDE_INT offset, size, max_size;
4359
  tree base, src;
4360
 
4361
  len = VEC_length (ipa_parm_adjustment_t, adjustments);
4362
 
4363
  if (TREE_CODE (*expr) == BIT_FIELD_REF
4364
      || TREE_CODE (*expr) == IMAGPART_EXPR
4365
      || TREE_CODE (*expr) == REALPART_EXPR)
4366
    {
4367
      expr = &TREE_OPERAND (*expr, 0);
4368
      convert = true;
4369
    }
4370
 
4371
  base = get_ref_base_and_extent (*expr, &offset, &size, &max_size);
4372
  if (!base || size == -1 || max_size == -1)
4373
    return false;
4374
 
4375
  if (TREE_CODE (base) == MEM_REF)
4376
    {
4377
      offset += mem_ref_offset (base).low * BITS_PER_UNIT;
4378
      base = TREE_OPERAND (base, 0);
4379
    }
4380
 
4381
  base = get_ssa_base_param (base);
4382
  if (!base || TREE_CODE (base) != PARM_DECL)
4383
    return false;
4384
 
4385
  for (i = 0; i < len; i++)
4386
    {
4387
      adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
4388
 
4389
      if (adj->base == base &&
4390
          (adj->offset == offset || adj->remove_param))
4391
        {
4392
          cand = adj;
4393
          break;
4394
        }
4395
    }
4396
  if (!cand || cand->copy_param || cand->remove_param)
4397
    return false;
4398
 
4399
  if (cand->by_ref)
4400
    src = build_simple_mem_ref (cand->reduction);
4401
  else
4402
    src = cand->reduction;
4403
 
4404
  if (dump_file && (dump_flags & TDF_DETAILS))
4405
    {
4406
      fprintf (dump_file, "About to replace expr ");
4407
      print_generic_expr (dump_file, *expr, 0);
4408
      fprintf (dump_file, " with ");
4409
      print_generic_expr (dump_file, src, 0);
4410
      fprintf (dump_file, "\n");
4411
    }
4412
 
4413
  if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4414
    {
4415
      tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4416
      *expr = vce;
4417
    }
4418
  else
4419
    *expr = src;
4420
  return true;
4421
}
4422
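
/* Editorial aside, not part of the original tree-sra.c: the source-level
   effect of the replacement above.  With an adjustment that maps offset 0
   of *P to a new scalar parameter, the reference p->x in BEFORE is
   rewritten to CAND->reduction, so the function conceptually becomes
   "static int after (int x) { return x; }".  Hypothetical names; a
   minimal sketch.  */

struct big_s { int x; int pad[15]; };

static int
before (struct big_s *p)
{
  return p->x;          /* rewritten to use the new scalar parameter */
}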
 
4423
/* If the statement pointed to by STMT_PTR contains any expressions that need
4424
   to be replaced with a different one as noted by ADJUSTMENTS, do so.  Handle any
4425
   potential type incompatibilities (GSI is used to accommodate conversion
4426
   statements and must point to the statement).  Return true iff the statement
4427
   was modified.  */
4428
 
4429
static bool
4430
sra_ipa_modify_assign (gimple *stmt_ptr, gimple_stmt_iterator *gsi,
4431
                       ipa_parm_adjustment_vec adjustments)
4432
{
4433
  gimple stmt = *stmt_ptr;
4434
  tree *lhs_p, *rhs_p;
4435
  bool any;
4436
 
4437
  if (!gimple_assign_single_p (stmt))
4438
    return false;
4439
 
4440
  rhs_p = gimple_assign_rhs1_ptr (stmt);
4441
  lhs_p = gimple_assign_lhs_ptr (stmt);
4442
 
4443
  any = sra_ipa_modify_expr (rhs_p, false, adjustments);
4444
  any |= sra_ipa_modify_expr (lhs_p, false, adjustments);
4445
  if (any)
4446
    {
4447
      tree new_rhs = NULL_TREE;
4448
 
4449
      if (!useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
4450
        {
4451
          if (TREE_CODE (*rhs_p) == CONSTRUCTOR)
4452
            {
4453
              /* V_C_Es of constructors can cause trouble (PR 42714).  */
4454
              if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
4455
                *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
4456
              else
4457
                *rhs_p = build_constructor (TREE_TYPE (*lhs_p), 0);
4458
            }
4459
          else
4460
            new_rhs = fold_build1_loc (gimple_location (stmt),
4461
                                       VIEW_CONVERT_EXPR, TREE_TYPE (*lhs_p),
4462
                                       *rhs_p);
4463
        }
4464
      else if (REFERENCE_CLASS_P (*rhs_p)
4465
               && is_gimple_reg_type (TREE_TYPE (*lhs_p))
4466
               && !is_gimple_reg (*lhs_p))
4467
        /* This can happen when an assignment in between two single field
4468
           structures is turned into an assignment in between two pointers to
4469
           scalars (PR 42237).  */
4470
        new_rhs = *rhs_p;
4471
 
4472
      if (new_rhs)
4473
        {
4474
          tree tmp = force_gimple_operand_gsi (gsi, new_rhs, true, NULL_TREE,
4475
                                               true, GSI_SAME_STMT);
4476
 
4477
          gimple_assign_set_rhs_from_tree (gsi, tmp);
4478
        }
4479
 
4480
      return true;
4481
    }
4482
 
4483
  return false;
4484
}
4485
 
4486
/* Traverse the function body and carry out all modifications as described in
4487
   ADJUSTMENTS.  Return true iff the CFG has been changed.  */
4488
 
4489
static bool
4490
ipa_sra_modify_function_body (ipa_parm_adjustment_vec adjustments)
4491
{
4492
  bool cfg_changed = false;
4493
  basic_block bb;
4494
 
4495
  FOR_EACH_BB (bb)
4496
    {
4497
      gimple_stmt_iterator gsi;
4498
 
4499
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4500
        replace_removed_params_ssa_names (gsi_stmt (gsi), adjustments);
4501
 
4502
      gsi = gsi_start_bb (bb);
4503
      while (!gsi_end_p (gsi))
4504
        {
4505
          gimple stmt = gsi_stmt (gsi);
4506
          bool modified = false;
4507
          tree *t;
4508
          unsigned i;
4509
 
4510
          switch (gimple_code (stmt))
4511
            {
4512
            case GIMPLE_RETURN:
4513
              t = gimple_return_retval_ptr (stmt);
4514
              if (*t != NULL_TREE)
4515
                modified |= sra_ipa_modify_expr (t, true, adjustments);
4516
              break;
4517
 
4518
            case GIMPLE_ASSIGN:
4519
              modified |= sra_ipa_modify_assign (&stmt, &gsi, adjustments);
4520
              modified |= replace_removed_params_ssa_names (stmt, adjustments);
4521
              break;
4522
 
4523
            case GIMPLE_CALL:
4524
              /* Operands must be processed before the lhs.  */
4525
              for (i = 0; i < gimple_call_num_args (stmt); i++)
4526
                {
4527
                  t = gimple_call_arg_ptr (stmt, i);
4528
                  modified |= sra_ipa_modify_expr (t, true, adjustments);
4529
                }
4530
 
4531
              if (gimple_call_lhs (stmt))
4532
                {
4533
                  t = gimple_call_lhs_ptr (stmt);
4534
                  modified |= sra_ipa_modify_expr (t, false, adjustments);
4535
                  modified |= replace_removed_params_ssa_names (stmt,
4536
                                                                adjustments);
4537
                }
4538
              break;
4539
 
4540
            case GIMPLE_ASM:
4541
              for (i = 0; i < gimple_asm_ninputs (stmt); i++)
4542
                {
4543
                  t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
4544
                  modified |= sra_ipa_modify_expr (t, true, adjustments);
4545
                }
4546
              for (i = 0; i < gimple_asm_noutputs (stmt); i++)
4547
                {
4548
                  t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
4549
                  modified |= sra_ipa_modify_expr (t, false, adjustments);
4550
                }
4551
              break;
4552
 
4553
            default:
4554
              break;
4555
            }
4556
 
4557
          if (modified)
4558
            {
4559
              update_stmt (stmt);
4560
              if (maybe_clean_eh_stmt (stmt)
4561
                  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4562
                cfg_changed = true;
4563
            }
4564
          gsi_next (&gsi);
4565
        }
4566
    }
4567
 
4568
  return cfg_changed;
4569
}
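
/* For illustration only (hypothetical GIMPLE, SSA names made up): given
   a parameter P of type 'struct pair *' whose only used field is 'a',
   reduced by IPA-SRA to a scalar replacement ISRA.0 of type 'int', the
   statement walk above rewrites e.g.

     _1 = p_2(D)->a;          into       _1 = ISRA.0_3(D);

   in returns, assignments, call arguments and asm operands alike, and
   purges EH edges from statements that can no longer throw after the
   rewrite.  */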

/* Call gimple_debug_bind_reset_value on all debug statements describing
   gimple register parameters that are being removed or replaced.  */

static void
sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
{
  int i, len;
  gimple_stmt_iterator *gsip = NULL, gsi;

  if (MAY_HAVE_DEBUG_STMTS && single_succ_p (ENTRY_BLOCK_PTR))
    {
      gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR));
      gsip = &gsi;
    }
  len = VEC_length (ipa_parm_adjustment_t, adjustments);
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      imm_use_iterator ui;
      gimple stmt, def_temp;
      tree name, vexpr, copy = NULL_TREE;
      use_operand_p use_p;

      adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
      if (adj->copy_param || !is_gimple_reg (adj->base))
        continue;
      name = gimple_default_def (cfun, adj->base);
      vexpr = NULL;
      if (name)
        FOR_EACH_IMM_USE_STMT (stmt, ui, name)
          {
            /* All other users must have been removed by
               ipa_sra_modify_function_body.  */
            gcc_assert (is_gimple_debug (stmt));
            if (vexpr == NULL && gsip != NULL)
              {
                gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
                vexpr = make_node (DEBUG_EXPR_DECL);
                def_temp = gimple_build_debug_source_bind (vexpr, adj->base,
                                                           NULL);
                DECL_ARTIFICIAL (vexpr) = 1;
                TREE_TYPE (vexpr) = TREE_TYPE (name);
                DECL_MODE (vexpr) = DECL_MODE (adj->base);
                gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
              }
            if (vexpr)
              {
                FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
                  SET_USE (use_p, vexpr);
              }
            else
              gimple_debug_bind_reset_value (stmt);
            update_stmt (stmt);
          }
      /* Create a VAR_DECL for debug info purposes.  */
      if (!DECL_IGNORED_P (adj->base))
        {
          copy = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                             VAR_DECL, DECL_NAME (adj->base),
                             TREE_TYPE (adj->base));
          if (DECL_PT_UID_SET_P (adj->base))
            SET_DECL_PT_UID (copy, DECL_PT_UID (adj->base));
          TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (adj->base);
          TREE_READONLY (copy) = TREE_READONLY (adj->base);
          TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (adj->base);
          DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (adj->base);
          DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (adj->base);
          DECL_IGNORED_P (copy) = DECL_IGNORED_P (adj->base);
          DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (adj->base);
          DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
          SET_DECL_RTL (copy, 0);
          TREE_USED (copy) = 1;
          DECL_CONTEXT (copy) = current_function_decl;
          add_referenced_var (copy);
          add_local_decl (cfun, copy);
          DECL_CHAIN (copy)
            = BLOCK_VARS (DECL_INITIAL (current_function_decl));
          BLOCK_VARS (DECL_INITIAL (current_function_decl)) = copy;
        }
      if (gsip != NULL && copy && target_for_debug_bind (adj->base))
        {
          gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
          if (vexpr)
            def_temp = gimple_build_debug_bind (copy, vexpr, NULL);
          else
            def_temp = gimple_build_debug_source_bind (copy, adj->base,
                                                       NULL);
          gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
        }
    }
}
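
/* A sketch of the effect on debug statements (hypothetical dump
   fragments): for a removed gimple-register parameter N, a debug bind
   such as

     # DEBUG n => n_1(D)

   is retargeted at an artificial DEBUG_EXPR_DECL tied to the lost
   parameter via a source bind, roughly

     # DEBUG D#1 s=> n
     # DEBUG n => D#1

   so debuggers can still recover N's value where possible instead of
   reporting it as <optimized out>.  */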

/* Return true iff some caller of NODE passes fewer actual arguments than
   there are formal parameters in the current function.  */

static bool
not_all_callers_have_enough_arguments_p (struct cgraph_node *node,
                                         void *data ATTRIBUTE_UNUSED)
{
  struct cgraph_edge *cs;
  for (cs = node->callers; cs; cs = cs->next_caller)
    if (!callsite_has_enough_arguments_p (cs->call_stmt))
      return true;

  return false;
}
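
/* Hypothetical example of the situation this predicate guards against:
   with an unprototyped or mismatched call, a caller may pass fewer
   actual arguments than the callee has formal parameters, in which
   case rewriting the parameter list would be unsafe.  */
#if 0
static int add (int a, int b) { return a + b; }

static int
broken_caller (void)
{
  /* Calling through a cast to a typeless function pointer passes only
     one actual argument for two formals; IPA-SRA must then leave
     'add' untouched.  */
  return ((int (*) ()) add) (1);
}
#endif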

/* Convert all callers of NODE.  DATA is the ipa_parm_adjustment_vec
   describing the adjusted parameters.  */

static bool
convert_callers_for_node (struct cgraph_node *node,
                          void *data)
{
  ipa_parm_adjustment_vec adjustments = (ipa_parm_adjustment_vec) data;
  bitmap recomputed_callers = BITMAP_ALLOC (NULL);
  struct cgraph_edge *cs;

  for (cs = node->callers; cs; cs = cs->next_caller)
    {
      current_function_decl = cs->caller->decl;
      push_cfun (DECL_STRUCT_FUNCTION (cs->caller->decl));

      if (dump_file)
        fprintf (dump_file, "Adjusting call (%i -> %i) %s -> %s\n",
                 cs->caller->uid, cs->callee->uid,
                 cgraph_node_name (cs->caller),
                 cgraph_node_name (cs->callee));

      ipa_modify_call_arguments (cs, cs->call_stmt, adjustments);

      pop_cfun ();
    }

  for (cs = node->callers; cs; cs = cs->next_caller)
    if (bitmap_set_bit (recomputed_callers, cs->caller->uid)
        && gimple_in_ssa_p (DECL_STRUCT_FUNCTION (cs->caller->decl)))
      compute_inline_parameters (cs->caller, true);
  BITMAP_FREE (recomputed_callers);

  return true;
}

/* Convert all callers of NODE to pass parameters as given in ADJUSTMENTS.
   If the current function calls itself recursively, also redirect those
   calls from OLD_DECL to the new node.  */

static void
convert_callers (struct cgraph_node *node, tree old_decl,
                 ipa_parm_adjustment_vec adjustments)
{
  tree old_cur_fndecl = current_function_decl;
  basic_block this_block;

  cgraph_for_node_and_aliases (node, convert_callers_for_node,
                               adjustments, false);

  current_function_decl = old_cur_fndecl;

  if (!encountered_recursive_call)
    return;

  FOR_EACH_BB (this_block)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_bb (this_block); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);
          tree call_fndecl;
          if (gimple_code (stmt) != GIMPLE_CALL)
            continue;
          call_fndecl = gimple_call_fndecl (stmt);
          if (call_fndecl == old_decl)
            {
              if (dump_file)
                fprintf (dump_file, "Adjusting recursive call\n");
              gimple_call_set_fndecl (stmt, node->decl);
              ipa_modify_call_arguments (NULL, stmt, adjustments);
            }
        }
    }
}
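
/* Illustration (hypothetical names): for a recursive function, the
   self-call in the body of the new clone still refers to the old
   declaration, so the loop above redirects it, e.g.

     foo (n - 1, &t);    becomes    foo.isra.0 (n - 1, t.x);

   matching the adjusted parameter list of the clone.  */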

/* Perform all the modifications required in IPA-SRA for NODE to have
   parameters as given in ADJUSTMENTS.  Return true iff the CFG has been
   changed.  */

static bool
modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
{
  struct cgraph_node *new_node;
  bool cfg_changed;
  VEC (cgraph_edge_p, heap) *redirect_callers = collect_callers_of_node (node);

  rebuild_cgraph_edges ();
  free_dominance_info (CDI_DOMINATORS);
  pop_cfun ();
  current_function_decl = NULL_TREE;

  new_node = cgraph_function_versioning (node, redirect_callers, NULL, NULL,
                                         false, NULL, NULL, "isra");
  current_function_decl = new_node->decl;
  push_cfun (DECL_STRUCT_FUNCTION (new_node->decl));

  ipa_modify_formal_parameters (current_function_decl, adjustments, "ISRA");
  cfg_changed = ipa_sra_modify_function_body (adjustments);
  sra_ipa_reset_debug_stmts (adjustments);
  convert_callers (new_node, node->decl, adjustments);
  cgraph_make_node_local (new_node);
  return cfg_changed;
}
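
/* A sketch of what modify_function does overall (hypothetical names):
   the original node 'foo' is left intact while a clone carrying the
   "isra" suffix receives the new signature and the body rewrite:

     int foo (struct pair *p);       original, kept in place
     int foo.isra.0 (int p_a);       clone with reduced parameters

   Existing callers are redirected to the clone, which is then made
   local so that later passes can treat it as such.  */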

/* Return false if the function is apparently unsuitable for IPA-SRA based on
   its attributes, return true otherwise.  NODE is the cgraph node of the
   current function.  */

static bool
ipa_sra_preliminary_function_checks (struct cgraph_node *node)
{
  if (!cgraph_node_can_be_local_p (node))
    {
      if (dump_file)
        fprintf (dump_file, "Function not local to this compilation unit.\n");
      return false;
    }

  if (!node->local.can_change_signature)
    {
      if (dump_file)
        fprintf (dump_file, "Function cannot change signature.\n");
      return false;
    }

  if (!tree_versionable_function_p (node->decl))
    {
      if (dump_file)
        fprintf (dump_file, "Function is not versionable.\n");
      return false;
    }

  if (DECL_VIRTUAL_P (current_function_decl))
    {
      if (dump_file)
        fprintf (dump_file, "Function is a virtual method.\n");
      return false;
    }

  if ((DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
      && inline_summary (node)->size >= MAX_INLINE_INSNS_AUTO)
    {
      if (dump_file)
        fprintf (dump_file, "Function too big to be made truly local.\n");
      return false;
    }

  if (!node->callers)
    {
      if (dump_file)
        fprintf (dump_file,
                 "Function has no callers in this compilation unit.\n");
      return false;
    }

  if (cfun->stdarg)
    {
      if (dump_file)
        fprintf (dump_file, "Function uses stdarg.\n");
      return false;
    }

  if (TYPE_ATTRIBUTES (TREE_TYPE (node->decl)))
    return false;

  return true;
}
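
/* For example (illustration only): a function like

     static int sum (const struct pair *p) { return p->a + p->b; }

   with callers inside this translation unit would typically pass all
   of the checks above, whereas an exported, virtual, variadic or
   attribute-carrying function is rejected before any analysis is
   done.  */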

/* Perform early interprocedural SRA.  */

static unsigned int
ipa_early_sra (void)
{
  struct cgraph_node *node = cgraph_get_node (current_function_decl);
  ipa_parm_adjustment_vec adjustments;
  int ret = 0;

  if (!ipa_sra_preliminary_function_checks (node))
    return 0;

  sra_initialize ();
  sra_mode = SRA_MODE_EARLY_IPA;

  if (!find_param_candidates ())
    {
      if (dump_file)
        fprintf (dump_file, "Function has no IPA-SRA candidates.\n");
      goto simple_out;
    }

  if (cgraph_for_node_and_aliases (node,
                                   not_all_callers_have_enough_arguments_p,
                                   NULL, true))
    {
      if (dump_file)
        fprintf (dump_file, "There are callers with insufficient number of "
                 "arguments.\n");
      goto simple_out;
    }

  bb_dereferences = XCNEWVEC (HOST_WIDE_INT,
                              func_param_count
                              * last_basic_block_for_function (cfun));
  final_bbs = BITMAP_ALLOC (NULL);

  scan_function ();
  if (encountered_apply_args)
    {
      if (dump_file)
        fprintf (dump_file, "Function calls __builtin_apply_args().\n");
      goto out;
    }

  if (encountered_unchangable_recursive_call)
    {
      if (dump_file)
        fprintf (dump_file, "Function calls itself with insufficient "
                 "number of arguments.\n");
      goto out;
    }

  adjustments = analyze_all_param_acesses ();
  if (!adjustments)
    goto out;
  if (dump_file)
    ipa_dump_param_adjustments (dump_file, adjustments, current_function_decl);

  if (modify_function (node, adjustments))
    ret = TODO_update_ssa | TODO_cleanup_cfg;
  else
    ret = TODO_update_ssa;
  VEC_free (ipa_parm_adjustment_t, heap, adjustments);

  statistics_counter_event (cfun, "Unused parameters deleted",
                            sra_stats.deleted_unused_parameters);
  statistics_counter_event (cfun, "Scalar parameters converted to by-value",
                            sra_stats.scalar_by_ref_to_by_val);
  statistics_counter_event (cfun, "Aggregate parameters broken up",
                            sra_stats.aggregate_params_reduced);
  statistics_counter_event (cfun, "Aggregate parameter components created",
                            sra_stats.param_reductions_created);

 out:
  BITMAP_FREE (final_bbs);
  free (bb_dereferences);
 simple_out:
  sra_deinitialize ();
  return ret;
}
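
/* End-to-end illustration of early IPA-SRA (hypothetical source and
   names): a by-reference scalar parameter that is only read can be
   passed by value instead, removing the dereference from the callee
   and the address-taking from the callers.  */
#if 0
/* Before:  */
static int get (int *p) { return *p; }
int use (void) { int x = 42; return get (&x); }

/* Conceptually after (the clone is named get.isra.0 in dumps):  */
static int get_isra (int p) { return p; }
int use_after (void) { int x = 42; return get_isra (x); }
#endif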

/* Return true iff early IPA-SRA shall be performed.  */
static bool
ipa_early_sra_gate (void)
{
  return flag_ipa_sra && dbg_cnt (eipa_sra);
}
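
/* Note (informational): flag_ipa_sra corresponds to the -fipa-sra
   command-line option, and the dbg_cnt check allows the pass's effects
   to be bisected with -fdbg-cnt=eipa_sra:<N>, which lets the gate
   succeed only for the first N queries of the counter.  */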

struct gimple_opt_pass pass_early_ipa_sra =
{
 {
  GIMPLE_PASS,
  "eipa_sra",                           /* name */
  ipa_early_sra_gate,                   /* gate */
  ipa_early_sra,                        /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_IPA_SRA,                           /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_cgraph                      /* todo_flags_finish */
 }
};
