/* Scalar Replacement of Aggregates (SRA) converts some structure
   references into scalar references, exposing them to the scalar
   optimizers.
   Copyright (C) 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "ggc.h"
#include "tree.h"

/* These RTL headers are needed for basic-block.h.  */
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "diagnostic.h"
#include "langhooks.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "tree-gimple.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "timevar.h"
#include "flags.h"
#include "bitmap.h"
#include "obstack.h"
#include "target.h"
/* expr.h is needed for MOVE_RATIO.  */
#include "expr.h"
#include "params.h"


/* The objective of this pass is to replace a non-addressable aggregate with a
   set of independent variables.  Most of the time, all of these variables
   will be scalars.  But a secondary objective is to break up larger
   aggregates into smaller aggregates.  In the process we may find that some
   bits of the larger aggregate can be deleted as unreferenced.

   This substitution is done globally.  More localized substitutions would
   be the purview of a load-store motion pass.

   The optimization proceeds in phases:

     (1) Identify variables that have types that are candidates for
         decomposition.

     (2) Scan the function looking for the ways these variables are used.
         In particular we're interested in the number of times a variable
         (or member) is needed as a complete unit, and the number of times
         a variable (or member) is copied.

     (3) Based on the usage profile, instantiate substitution variables.

     (4) Scan the function making replacements.
*/
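
/* An illustrative sketch of the transformation (not part of the
   original source).  Given

     struct point { int x; int y; } p;
     p.x = 3;
     p.y = 4;
     return p.x + p.y;

   SRA replaces the non-addressable aggregate P with two scalar
   temporaries, roughly

     int p$x, p$y;
     p$x = 3;
     p$y = 4;
     return p$x + p$y;

   after which constant propagation and the other scalar optimizers
   can fold the result to 7.  */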

/* The set of todo flags to return from tree_sra.  */
static unsigned int todoflags;

/* The set of aggregate variables that are candidates for scalarization.  */
static bitmap sra_candidates;

/* Set of scalarizable PARM_DECLs that need copy-in operations at the
   beginning of the function.  */
static bitmap needs_copy_in;

/* Sets of bit pairs that cache type decomposition and instantiation.  */
static bitmap sra_type_decomp_cache;
static bitmap sra_type_inst_cache;

/* One of these structures is created for each candidate aggregate and
   each (accessed) member or group of members of such an aggregate.  */
struct sra_elt
{
  /* A tree of the elements.  Used when we want to traverse everything.  */
  struct sra_elt *parent;
  struct sra_elt *groups;
  struct sra_elt *children;
  struct sra_elt *sibling;

  /* If this element is a root, then this is the VAR_DECL.  If this is
     a sub-element, this is some token used to identify the reference.
     In the case of COMPONENT_REF, this is the FIELD_DECL.  In the case
     of an ARRAY_REF, this is the (constant) index.  In the case of an
     ARRAY_RANGE_REF, this is the (constant) RANGE_EXPR.  In the case
     of a complex number, this is a zero or one.  */
  tree element;

  /* The type of the element.  */
  tree type;

  /* A VAR_DECL, for any sub-element we've decided to replace.  */
  tree replacement;

  /* The number of times the element is referenced as a whole.  I.e.
     given "a.b.c", this would be incremented for C, but not for A or B.  */
  unsigned int n_uses;

  /* The number of times the element is copied to or from another
     scalarizable element.  */
  unsigned int n_copies;

  /* True if TYPE is scalar.  */
  bool is_scalar;

  /* True if this element is a group of members of its parent.  */
  bool is_group;

  /* True if we saw something about this element that prevents scalarization,
     such as non-constant indexing.  */
  bool cannot_scalarize;

  /* True if we've decided that structure-to-structure assignment
     should happen via memcpy and not per-element.  */
  bool use_block_copy;

  /* True if everything under this element has been marked TREE_NO_WARNING.  */
  bool all_no_warning;

  /* A flag for use with/after random access traversals.  */
  bool visited;
};
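
/* Illustrative sketch (not part of the original source): for
   "_Complex double z[2]", the root sra_elt is the VAR_DECL for Z; its
   children are the elements for z[0] and z[1], identified by their
   INTEGER_CST indices; each of those in turn has two children
   identified by integer_zero_node (the real part) and
   integer_one_node (the imaginary part).  */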

#define IS_ELEMENT_FOR_GROUP(ELEMENT) (TREE_CODE (ELEMENT) == RANGE_EXPR)

#define FOR_EACH_ACTUAL_CHILD(CHILD, ELT)                       \
  for ((CHILD) = (ELT)->is_group                                \
                 ? next_child_for_group (NULL, (ELT))           \
                 : (ELT)->children;                             \
       (CHILD);                                                 \
       (CHILD) = (ELT)->is_group                                \
                 ? next_child_for_group ((CHILD), (ELT))        \
                 : (CHILD)->sibling)

/* Helper function for above macro.  Return next child in group.  */
static struct sra_elt *
next_child_for_group (struct sra_elt *child, struct sra_elt *group)
{
  gcc_assert (group->is_group);

  /* Find the next child in the parent.  */
  if (child)
    child = child->sibling;
  else
    child = group->parent->children;

  /* Skip siblings that do not belong to the group.  */
  while (child)
    {
      tree g_elt = group->element;
      if (TREE_CODE (g_elt) == RANGE_EXPR)
        {
          if (!tree_int_cst_lt (child->element, TREE_OPERAND (g_elt, 0))
              && !tree_int_cst_lt (TREE_OPERAND (g_elt, 1), child->element))
            break;
        }
      else
        gcc_unreachable ();

      child = child->sibling;
    }

  return child;
}

/* Random access to the child of a parent is performed by hashing.
   This prevents quadratic behavior, and allows SRA to function
   reasonably on larger records.  */
static htab_t sra_map;

/* All structures are allocated out of the following obstack.  */
static struct obstack sra_obstack;

/* Debugging functions.  */
static void dump_sra_elt_name (FILE *, struct sra_elt *);
extern void debug_sra_elt_name (struct sra_elt *);

/* Forward declarations.  */
static tree generate_element_ref (struct sra_elt *);

/* Return true if DECL is an SRA candidate.  */

static bool
is_sra_candidate_decl (tree decl)
{
  return DECL_P (decl) && bitmap_bit_p (sra_candidates, DECL_UID (decl));
}

/* Return true if TYPE is a scalar type.  */

static bool
is_sra_scalar_type (tree type)
{
  enum tree_code code = TREE_CODE (type);
  return (code == INTEGER_TYPE || code == REAL_TYPE || code == VECTOR_TYPE
          || code == ENUMERAL_TYPE || code == BOOLEAN_TYPE
          || code == POINTER_TYPE || code == OFFSET_TYPE
          || code == REFERENCE_TYPE);
}

/* Return true if TYPE can be decomposed into a set of independent variables.

   Note that this doesn't imply that all elements of TYPE can be
   instantiated, just that if we decide to break up the type into
   separate pieces that it can be done.  */

bool
sra_type_can_be_decomposed_p (tree type)
{
  unsigned int cache = TYPE_UID (TYPE_MAIN_VARIANT (type)) * 2;
  tree t;

  /* Avoid searching the same type twice.  */
  if (bitmap_bit_p (sra_type_decomp_cache, cache+0))
    return true;
  if (bitmap_bit_p (sra_type_decomp_cache, cache+1))
    return false;

  /* The type must have a definite nonzero size.  */
  if (TYPE_SIZE (type) == NULL || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
      || integer_zerop (TYPE_SIZE (type)))
    goto fail;

  /* The type must be a non-union aggregate.  */
  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      {
        bool saw_one_field = false;

        for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
          if (TREE_CODE (t) == FIELD_DECL)
            {
              /* Reject incorrectly represented bit fields.  */
              if (DECL_BIT_FIELD (t)
                  && (tree_low_cst (DECL_SIZE (t), 1)
                      != TYPE_PRECISION (TREE_TYPE (t))))
                goto fail;

              saw_one_field = true;
            }

        /* Record types must have at least one field.  */
        if (!saw_one_field)
          goto fail;
      }
      break;

    case ARRAY_TYPE:
      /* Array types must have a fixed lower and upper bound.  */
      t = TYPE_DOMAIN (type);
      if (t == NULL)
        goto fail;
      if (TYPE_MIN_VALUE (t) == NULL || !TREE_CONSTANT (TYPE_MIN_VALUE (t)))
        goto fail;
      if (TYPE_MAX_VALUE (t) == NULL || !TREE_CONSTANT (TYPE_MAX_VALUE (t)))
        goto fail;
      break;

    case COMPLEX_TYPE:
      break;

    default:
      goto fail;
    }

  bitmap_set_bit (sra_type_decomp_cache, cache+0);
  return true;

 fail:
  bitmap_set_bit (sra_type_decomp_cache, cache+1);
  return false;
}

/* Return true if DECL can be decomposed into a set of independent
   (though not necessarily scalar) variables.  */

static bool
decl_can_be_decomposed_p (tree var)
{
  /* Early out for scalars.  */
  if (is_sra_scalar_type (TREE_TYPE (var)))
    return false;

  /* The variable must not be aliased.  */
  if (!is_gimple_non_addressable (var))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Cannot scalarize variable ");
          print_generic_expr (dump_file, var, dump_flags);
          fprintf (dump_file, " because it must live in memory\n");
        }
      return false;
    }

  /* The variable must not be volatile.  */
  if (TREE_THIS_VOLATILE (var))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Cannot scalarize variable ");
          print_generic_expr (dump_file, var, dump_flags);
          fprintf (dump_file, " because it is declared volatile\n");
        }
      return false;
    }

  /* We must be able to decompose the variable's type.  */
  if (!sra_type_can_be_decomposed_p (TREE_TYPE (var)))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Cannot scalarize variable ");
          print_generic_expr (dump_file, var, dump_flags);
          fprintf (dump_file, " because its type cannot be decomposed\n");
        }
      return false;
    }

  return true;
}

/* Return true if TYPE can be *completely* decomposed into scalars.  */

static bool
type_can_instantiate_all_elements (tree type)
{
  if (is_sra_scalar_type (type))
    return true;
  if (!sra_type_can_be_decomposed_p (type))
    return false;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      {
        unsigned int cache = TYPE_UID (TYPE_MAIN_VARIANT (type)) * 2;
        tree f;

        if (bitmap_bit_p (sra_type_inst_cache, cache+0))
          return true;
        if (bitmap_bit_p (sra_type_inst_cache, cache+1))
          return false;

        for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
          if (TREE_CODE (f) == FIELD_DECL)
            {
              if (!type_can_instantiate_all_elements (TREE_TYPE (f)))
                {
                  bitmap_set_bit (sra_type_inst_cache, cache+1);
                  return false;
                }
            }

        bitmap_set_bit (sra_type_inst_cache, cache+0);
        return true;
      }

    case ARRAY_TYPE:
      return type_can_instantiate_all_elements (TREE_TYPE (type));

    case COMPLEX_TYPE:
      return true;

    default:
      gcc_unreachable ();
    }
}

/* Test whether ELT or some sub-element cannot be scalarized.  Returns
   true only if ELT and everything below it can be scalarized.  */

static bool
can_completely_scalarize_p (struct sra_elt *elt)
{
  struct sra_elt *c;

  if (elt->cannot_scalarize)
    return false;

  for (c = elt->children; c; c = c->sibling)
    if (!can_completely_scalarize_p (c))
      return false;

  for (c = elt->groups; c; c = c->sibling)
    if (!can_completely_scalarize_p (c))
      return false;

  return true;
}


/* A simplified tree hashing algorithm that only handles the types of
   trees we expect to find in sra_elt->element.  */

static hashval_t
sra_hash_tree (tree t)
{
  hashval_t h;

  switch (TREE_CODE (t))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      h = DECL_UID (t);
      break;

    case INTEGER_CST:
      h = TREE_INT_CST_LOW (t) ^ TREE_INT_CST_HIGH (t);
      break;

    case RANGE_EXPR:
      h = iterative_hash_expr (TREE_OPERAND (t, 0), 0);
      h = iterative_hash_expr (TREE_OPERAND (t, 1), h);
      break;

    case FIELD_DECL:
      /* We can have types that are compatible, but have different member
         lists, so we can't hash fields by ID.  Use offsets instead.  */
      h = iterative_hash_expr (DECL_FIELD_OFFSET (t), 0);
      h = iterative_hash_expr (DECL_FIELD_BIT_OFFSET (t), h);
      break;

    default:
      gcc_unreachable ();
    }

  return h;
}

/* Hash function for type SRA_ELT.  */

static hashval_t
sra_elt_hash (const void *x)
{
  const struct sra_elt *e = x;
  const struct sra_elt *p;
  hashval_t h;

  h = sra_hash_tree (e->element);

  /* Take into account everything back up the chain.  Given that chain
     lengths are rarely very long, this should be acceptable.  If we
     truly identify this as a performance problem, it should work to
     hash the pointer value "e->parent".  */
  for (p = e->parent; p ; p = p->parent)
    h = (h * 65521) ^ sra_hash_tree (p->element);

  return h;
}

/* Equality function for type SRA_ELT.  */

static int
sra_elt_eq (const void *x, const void *y)
{
  const struct sra_elt *a = x;
  const struct sra_elt *b = y;
  tree ae, be;

  if (a->parent != b->parent)
    return false;

  ae = a->element;
  be = b->element;

  if (ae == be)
    return true;
  if (TREE_CODE (ae) != TREE_CODE (be))
    return false;

  switch (TREE_CODE (ae))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      /* These are all pointer unique.  */
      return false;

    case INTEGER_CST:
      /* Integers are not pointer unique, so compare their values.  */
      return tree_int_cst_equal (ae, be);

    case RANGE_EXPR:
      return
        tree_int_cst_equal (TREE_OPERAND (ae, 0), TREE_OPERAND (be, 0))
        && tree_int_cst_equal (TREE_OPERAND (ae, 1), TREE_OPERAND (be, 1));

    case FIELD_DECL:
      /* Fields are unique within a record, but not between
         compatible records.  */
      if (DECL_FIELD_CONTEXT (ae) == DECL_FIELD_CONTEXT (be))
        return false;
      return fields_compatible_p (ae, be);

    default:
      gcc_unreachable ();
    }
}

/* Create or return the SRA_ELT structure for CHILD in PARENT.  PARENT
   may be null, in which case CHILD must be a DECL.  */

static struct sra_elt *
lookup_element (struct sra_elt *parent, tree child, tree type,
                enum insert_option insert)
{
  struct sra_elt dummy;
  struct sra_elt **slot;
  struct sra_elt *elt;

  if (parent)
    dummy.parent = parent->is_group ? parent->parent : parent;
  else
    dummy.parent = NULL;
  dummy.element = child;

  slot = (struct sra_elt **) htab_find_slot (sra_map, &dummy, insert);
  if (!slot && insert == NO_INSERT)
    return NULL;

  elt = *slot;
  if (!elt && insert == INSERT)
    {
      *slot = elt = obstack_alloc (&sra_obstack, sizeof (*elt));
      memset (elt, 0, sizeof (*elt));

      elt->parent = parent;
      elt->element = child;
      elt->type = type;
      elt->is_scalar = is_sra_scalar_type (type);

      if (parent)
        {
          if (IS_ELEMENT_FOR_GROUP (elt->element))
            {
              elt->is_group = true;
              elt->sibling = parent->groups;
              parent->groups = elt;
            }
          else
            {
              elt->sibling = parent->children;
              parent->children = elt;
            }
        }

      /* If this is a parameter, then if we want to scalarize, we have
         one copy from the true function parameter.  Count it now.  */
      if (TREE_CODE (child) == PARM_DECL)
        {
          elt->n_copies = 1;
          bitmap_set_bit (needs_copy_in, DECL_UID (child));
        }
    }

  return elt;
}

/* Create or return the SRA_ELT structure for EXPR if the expression
   refers to a scalarizable variable.  */

static struct sra_elt *
maybe_lookup_element_for_expr (tree expr)
{
  struct sra_elt *elt;
  tree child;

  switch (TREE_CODE (expr))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      if (is_sra_candidate_decl (expr))
        return lookup_element (NULL, expr, TREE_TYPE (expr), INSERT);
      return NULL;

    case ARRAY_REF:
      /* We can't scalarize variable array indices.  */
      if (in_array_bounds_p (expr))
        child = TREE_OPERAND (expr, 1);
      else
        return NULL;
      break;

    case ARRAY_RANGE_REF:
      /* We can't scalarize variable array indices.  */
      if (range_in_array_bounds_p (expr))
        {
          tree domain = TYPE_DOMAIN (TREE_TYPE (expr));
          child = build2 (RANGE_EXPR, integer_type_node,
                          TYPE_MIN_VALUE (domain), TYPE_MAX_VALUE (domain));
        }
      else
        return NULL;
      break;

    case COMPONENT_REF:
      /* Don't look through unions.  */
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) != RECORD_TYPE)
        return NULL;
      child = TREE_OPERAND (expr, 1);
      break;

    case REALPART_EXPR:
      child = integer_zero_node;
      break;
    case IMAGPART_EXPR:
      child = integer_one_node;
      break;

    default:
      return NULL;
    }

  elt = maybe_lookup_element_for_expr (TREE_OPERAND (expr, 0));
  if (elt)
    return lookup_element (elt, child, TREE_TYPE (expr), INSERT);
  return NULL;
}
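
/* Illustrative sketch (not part of the original source): for the
   reference "a.b[2]" where A is a candidate VAR_DECL, the recursion
   above bottoms out at A, then creates (or finds) the element for the
   FIELD_DECL B beneath it, and finally the element for the constant
   index 2 beneath that, mirroring the shape of the reference.  */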


/* Functions to walk just enough of the tree to see all scalarizable
   references, and categorize them.  */

/* A set of callbacks for phases 2 and 4.  They'll be invoked for the
   various kinds of references seen.  In all cases, *BSI is an iterator
   pointing to the statement being processed.  */
struct sra_walk_fns
{
  /* Invoked when ELT is required as a unit.  Note that ELT might refer to
     a leaf node, in which case this is a simple scalar reference.  *EXPR_P
     points to the location of the expression.  IS_OUTPUT is true if this
     is a left-hand-side reference.  USE_ALL is true if we saw something we
     couldn't quite identify and had to force the use of the entire object.  */
  void (*use) (struct sra_elt *elt, tree *expr_p,
               block_stmt_iterator *bsi, bool is_output, bool use_all);

  /* Invoked when we have a copy between two scalarizable references.  */
  void (*copy) (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
                block_stmt_iterator *bsi);

  /* Invoked when ELT is initialized from a constant.  VALUE may be NULL,
     in which case it should be treated as an empty CONSTRUCTOR.  */
  void (*init) (struct sra_elt *elt, tree value, block_stmt_iterator *bsi);

  /* Invoked when we have a copy between one scalarizable reference ELT
     and one non-scalarizable reference OTHER without side-effects.
     IS_OUTPUT is true if ELT is on the left-hand side.  */
  void (*ldst) (struct sra_elt *elt, tree other,
                block_stmt_iterator *bsi, bool is_output);

  /* True during phase 2, false during phase 4.  */
  /* ??? This is a hack.  */
  bool initial_scan;
};
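
/* Illustrative sketch (not part of the original source): for candidate
   aggregates S and T of the same struct type,

     s = t;        invokes COPY (both sides scalarizable);
     s = *p;       invokes LDST with IS_OUTPUT true;
     s.f = x;      invokes USE on the element for s.f;
     s = { ... };  (a CONSTRUCTOR on the RHS) invokes INIT.  */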

#ifdef ENABLE_CHECKING
/* Invoked via walk_tree, if *TP contains a candidate decl, return it.  */

static tree
sra_find_candidate_decl (tree *tp, int *walk_subtrees,
                         void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
    {
      *walk_subtrees = 0;
      if (is_sra_candidate_decl (t))
        return t;
    }
  else if (TYPE_P (t))
    *walk_subtrees = 0;

  return NULL;
}
#endif

/* Walk most expressions looking for a scalarizable aggregate.
   If we find one, invoke FNS->USE.  */

static void
sra_walk_expr (tree *expr_p, block_stmt_iterator *bsi, bool is_output,
               const struct sra_walk_fns *fns)
{
  tree expr = *expr_p;
  tree inner = expr;
  bool disable_scalarization = false;
  bool use_all_p = false;

  /* We're looking to collect a reference expression between EXPR and INNER,
     such that INNER is a scalarizable decl and all other nodes through EXPR
     are references that we can scalarize.  If we come across something that
     we can't scalarize, we reset EXPR.  This has the effect of making it
     appear that we're referring to the larger expression as a whole.  */

  while (1)
    switch (TREE_CODE (inner))
      {
      case VAR_DECL:
      case PARM_DECL:
      case RESULT_DECL:
        /* If there is a scalarizable decl at the bottom, then process it.  */
        if (is_sra_candidate_decl (inner))
          {
            struct sra_elt *elt = maybe_lookup_element_for_expr (expr);
            if (disable_scalarization)
              elt->cannot_scalarize = true;
            else
              fns->use (elt, expr_p, bsi, is_output, use_all_p);
          }
        return;

      case ARRAY_REF:
        /* Non-constant index means any member may be accessed.  Prevent the
           expression from being scalarized.  If we were to treat this as a
           reference to the whole array, we can wind up with a single dynamic
           index reference inside a loop being overridden by several constant
           index references during loop setup.  It's possible that this could
           be avoided by using dynamic usage counts based on BB trip counts
           (based on loop analysis or profiling), but that hardly seems worth
           the effort.  */
        /* ??? Hack.  Figure out how to push this into the scan routines
           without duplicating too much code.  */
        if (!in_array_bounds_p (inner))
          {
            disable_scalarization = true;
            goto use_all;
          }
        /* ??? Are we assured that non-constant bounds and stride will have
           the same value everywhere?  I don't think Fortran will...  */
        if (TREE_OPERAND (inner, 2) || TREE_OPERAND (inner, 3))
          goto use_all;
        inner = TREE_OPERAND (inner, 0);
        break;

      case ARRAY_RANGE_REF:
        if (!range_in_array_bounds_p (inner))
          {
            disable_scalarization = true;
            goto use_all;
          }
        /* ??? See above re non-constant bounds and stride.  */
        if (TREE_OPERAND (inner, 2) || TREE_OPERAND (inner, 3))
          goto use_all;
        inner = TREE_OPERAND (inner, 0);
        break;

      case COMPONENT_REF:
        /* A reference to a union member constitutes a reference to the
           entire union.  */
        if (TREE_CODE (TREE_TYPE (TREE_OPERAND (inner, 0))) != RECORD_TYPE)
          goto use_all;
        /* ??? See above re non-constant stride.  */
        if (TREE_OPERAND (inner, 2))
          goto use_all;
        inner = TREE_OPERAND (inner, 0);
        break;

      case REALPART_EXPR:
      case IMAGPART_EXPR:
        inner = TREE_OPERAND (inner, 0);
        break;

      case BIT_FIELD_REF:
        /* A bit field reference (access to *multiple* fields simultaneously)
           is not currently scalarized.  Consider this an access to the
           complete outer element, to which walk_tree will bring us next.  */
        goto use_all;

      case VIEW_CONVERT_EXPR:
      case NOP_EXPR:
        /* Similarly, a view/nop explicitly wants to look at an object in a
           type other than the one we've scalarized.  */
        goto use_all;

      case WITH_SIZE_EXPR:
        /* This is a transparent wrapper.  The entire inner expression really
           is being used.  */
        goto use_all;

      use_all:
        expr_p = &TREE_OPERAND (inner, 0);
        inner = expr = *expr_p;
        use_all_p = true;
        break;

      default:
#ifdef ENABLE_CHECKING
        /* Validate that we're not missing any references.  */
        gcc_assert (!walk_tree (&inner, sra_find_candidate_decl, NULL, NULL));
#endif
        return;
      }
}

/* Walk a TREE_LIST of values looking for scalarizable aggregates.
   If we find one, invoke FNS->USE.  */

static void
sra_walk_tree_list (tree list, block_stmt_iterator *bsi, bool is_output,
                    const struct sra_walk_fns *fns)
{
  tree op;
  for (op = list; op ; op = TREE_CHAIN (op))
    sra_walk_expr (&TREE_VALUE (op), bsi, is_output, fns);
}

/* Walk the arguments of a CALL_EXPR looking for scalarizable aggregates.
   If we find one, invoke FNS->USE.  */

static void
sra_walk_call_expr (tree expr, block_stmt_iterator *bsi,
                    const struct sra_walk_fns *fns)
{
  sra_walk_tree_list (TREE_OPERAND (expr, 1), bsi, false, fns);
}

/* Walk the inputs and outputs of an ASM_EXPR looking for scalarizable
   aggregates.  If we find one, invoke FNS->USE.  */

static void
sra_walk_asm_expr (tree expr, block_stmt_iterator *bsi,
                   const struct sra_walk_fns *fns)
{
  sra_walk_tree_list (ASM_INPUTS (expr), bsi, false, fns);
  sra_walk_tree_list (ASM_OUTPUTS (expr), bsi, true, fns);
}

/* Walk a MODIFY_EXPR and categorize the assignment appropriately.  */

static void
sra_walk_modify_expr (tree expr, block_stmt_iterator *bsi,
                      const struct sra_walk_fns *fns)
{
  struct sra_elt *lhs_elt, *rhs_elt;
  tree lhs, rhs;

  lhs = TREE_OPERAND (expr, 0);
  rhs = TREE_OPERAND (expr, 1);
  lhs_elt = maybe_lookup_element_for_expr (lhs);
  rhs_elt = maybe_lookup_element_for_expr (rhs);

  /* If both sides are scalarizable, this is a COPY operation.  */
  if (lhs_elt && rhs_elt)
    {
      fns->copy (lhs_elt, rhs_elt, bsi);
      return;
    }

  /* If the RHS is scalarizable, handle it.  There are only two cases.  */
  if (rhs_elt)
    {
      if (!rhs_elt->is_scalar && !TREE_SIDE_EFFECTS (lhs))
        fns->ldst (rhs_elt, lhs, bsi, false);
      else
        fns->use (rhs_elt, &TREE_OPERAND (expr, 1), bsi, false, false);
    }

  /* If it isn't scalarizable, there may be scalarizable variables within, so
     check for a call or else walk the RHS to see if we need to do any
     copy-in operations.  We need to do it before the LHS is scalarized so
     that the statements get inserted in the proper place, before any
     copy-out operations.  */
  else
    {
      tree call = get_call_expr_in (rhs);
      if (call)
        sra_walk_call_expr (call, bsi, fns);
      else
        sra_walk_expr (&TREE_OPERAND (expr, 1), bsi, false, fns);
    }

  /* Likewise, handle the LHS being scalarizable.  We have cases similar
     to those above, but also want to handle RHS being constant.  */
  if (lhs_elt)
    {
      /* If this is an assignment from a constant, or constructor, then
         we have access to all of the elements individually.  Invoke INIT.  */
      if (TREE_CODE (rhs) == COMPLEX_EXPR
          || TREE_CODE (rhs) == COMPLEX_CST
          || TREE_CODE (rhs) == CONSTRUCTOR)
        fns->init (lhs_elt, rhs, bsi);

      /* If this is an assignment from read-only memory, treat this as if
         we'd been passed the constructor directly.  Invoke INIT.  */
      else if (TREE_CODE (rhs) == VAR_DECL
               && TREE_STATIC (rhs)
               && TREE_READONLY (rhs)
               && targetm.binds_local_p (rhs))
        fns->init (lhs_elt, DECL_INITIAL (rhs), bsi);

      /* If this is a copy from a non-scalarizable lvalue, invoke LDST.
         The lvalue requirement prevents us from trying to directly scalarize
         the result of a function call, which would result in trying to call
         the function multiple times, and other evil things.  */
      else if (!lhs_elt->is_scalar
               && !TREE_SIDE_EFFECTS (rhs) && is_gimple_addressable (rhs))
        fns->ldst (lhs_elt, rhs, bsi, true);

      /* Otherwise we're being used in some context that requires the
         aggregate to be seen as a whole.  Invoke USE.  */
      else
        fns->use (lhs_elt, &TREE_OPERAND (expr, 0), bsi, true, false);
    }

  /* Similarly to above, LHS_ELT being null only means that the LHS as a
     whole is not a scalarizable reference.  There may be occurrences of
     scalarizable variables within, which implies a USE.  */
  else
    sra_walk_expr (&TREE_OPERAND (expr, 0), bsi, true, fns);
}

/* Entry point to the walk functions.  Search the entire function,
   invoking the callbacks in FNS on each of the references to
   scalarizable variables.  */

static void
sra_walk_function (const struct sra_walk_fns *fns)
{
  basic_block bb;
  block_stmt_iterator si, ni;

  /* ??? Phase 4 could derive some benefit from walking the function in
     dominator tree order.  */

  FOR_EACH_BB (bb)
    for (si = bsi_start (bb); !bsi_end_p (si); si = ni)
      {
        tree stmt, t;
        stmt_ann_t ann;

        stmt = bsi_stmt (si);
        ann = stmt_ann (stmt);

        ni = si;
        bsi_next (&ni);

        /* If the statement has no virtual operands, then it doesn't
           make any structure references that we care about.  */
        if (ZERO_SSA_OPERANDS (stmt, (SSA_OP_VIRTUAL_DEFS | SSA_OP_VUSE)))
          continue;

        switch (TREE_CODE (stmt))
          {
          case RETURN_EXPR:
            /* If we have "return <retval>" then the return value is
               already exposed for our pleasure.  Walk it as a USE to
               force all the components back in place for the return.

               If we have an embedded assignment, then <retval> is of
               a type that gets returned in registers in this ABI, and
               we do not wish to extend their lifetimes.  Treat this
               as a USE of the variable on the RHS of this assignment.  */

            t = TREE_OPERAND (stmt, 0);
            if (TREE_CODE (t) == MODIFY_EXPR)
              sra_walk_expr (&TREE_OPERAND (t, 1), &si, false, fns);
            else
              sra_walk_expr (&TREE_OPERAND (stmt, 0), &si, false, fns);
            break;

          case MODIFY_EXPR:
            sra_walk_modify_expr (stmt, &si, fns);
            break;
          case CALL_EXPR:
            sra_walk_call_expr (stmt, &si, fns);
            break;
          case ASM_EXPR:
            sra_walk_asm_expr (stmt, &si, fns);
            break;

          default:
            break;
          }
      }
}

/* Phase One: Scan all referenced variables in the program looking for
   structures that could be decomposed.  */

static bool
find_candidates_for_sra (void)
{
  bool any_set = false;
  tree var;
  referenced_var_iterator rvi;

  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      if (decl_can_be_decomposed_p (var))
        {
          bitmap_set_bit (sra_candidates, DECL_UID (var));
          any_set = true;
        }
    }

  return any_set;
}


/* Phase Two: Scan all references to scalarizable variables.  Count the
   number of times they are used or copied respectively.  */

/* Callbacks to fill in SRA_WALK_FNS.  Everything but USE is
   considered a copy, because we can decompose the reference such that
   the sub-elements needn't be contiguous.  */

static void
scan_use (struct sra_elt *elt, tree *expr_p ATTRIBUTE_UNUSED,
          block_stmt_iterator *bsi ATTRIBUTE_UNUSED,
          bool is_output ATTRIBUTE_UNUSED, bool use_all ATTRIBUTE_UNUSED)
{
  elt->n_uses += 1;
}

static void
scan_copy (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
           block_stmt_iterator *bsi ATTRIBUTE_UNUSED)
{
  lhs_elt->n_copies += 1;
  rhs_elt->n_copies += 1;
}

static void
scan_init (struct sra_elt *lhs_elt, tree rhs ATTRIBUTE_UNUSED,
           block_stmt_iterator *bsi ATTRIBUTE_UNUSED)
{
  lhs_elt->n_copies += 1;
}

static void
scan_ldst (struct sra_elt *elt, tree other ATTRIBUTE_UNUSED,
           block_stmt_iterator *bsi ATTRIBUTE_UNUSED,
           bool is_output ATTRIBUTE_UNUSED)
{
  elt->n_copies += 1;
}

/* Dump the values we collected during the scanning phase.  */

static void
scan_dump (struct sra_elt *elt)
{
  struct sra_elt *c;

  dump_sra_elt_name (dump_file, elt);
  fprintf (dump_file, ": n_uses=%u n_copies=%u\n", elt->n_uses, elt->n_copies);

  for (c = elt->children; c ; c = c->sibling)
    scan_dump (c);

  for (c = elt->groups; c ; c = c->sibling)
    scan_dump (c);
}

/* Entry point to phase 2.  Scan the entire function, building up
   scalarization data structures, recording copies and uses.  */

static void
scan_function (void)
{
  static const struct sra_walk_fns fns = {
    scan_use, scan_copy, scan_init, scan_ldst, true
  };
  bitmap_iterator bi;

  sra_walk_function (&fns);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      unsigned i;

      fputs ("\nScan results:\n", dump_file);
      EXECUTE_IF_SET_IN_BITMAP (sra_candidates, 0, i, bi)
        {
          tree var = referenced_var (i);
          struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
          if (elt)
            scan_dump (elt);
        }
      fputc ('\n', dump_file);
    }
}

/* Phase Three: Make decisions about which variables to scalarize, if any.
   All elements to be scalarized have replacement variables made for them.  */

/* A subroutine of build_element_name.  Recursively build the element
   name on the obstack.  */

static void
build_element_name_1 (struct sra_elt *elt)
{
  tree t;
  char buffer[32];

  if (elt->parent)
    {
      build_element_name_1 (elt->parent);
      obstack_1grow (&sra_obstack, '$');

      if (TREE_CODE (elt->parent->type) == COMPLEX_TYPE)
        {
          if (elt->element == integer_zero_node)
            obstack_grow (&sra_obstack, "real", 4);
          else
            obstack_grow (&sra_obstack, "imag", 4);
          return;
        }
    }

  t = elt->element;
  if (TREE_CODE (t) == INTEGER_CST)
    {
      /* ??? Eh.  Don't bother doing double-wide printing.  */
      sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (t));
      obstack_grow (&sra_obstack, buffer, strlen (buffer));
    }
  else
    {
      tree name = DECL_NAME (t);
      if (name)
        obstack_grow (&sra_obstack, IDENTIFIER_POINTER (name),
                      IDENTIFIER_LENGTH (name));
      else
        {
          sprintf (buffer, "D%u", DECL_UID (t));
          obstack_grow (&sra_obstack, buffer, strlen (buffer));
        }
    }
}

/* Construct a pretty variable name for an element's replacement variable.
   The name is built on the obstack.  */

static char *
build_element_name (struct sra_elt *elt)
{
  build_element_name_1 (elt);
  obstack_1grow (&sra_obstack, '\0');
  return XOBFINISH (&sra_obstack, char *);
}

/* Instantiate an element as an independent variable.  */

static void
instantiate_element (struct sra_elt *elt)
{
  struct sra_elt *base_elt;
  tree var, base;

  for (base_elt = elt; base_elt->parent; base_elt = base_elt->parent)
    continue;
  base = base_elt->element;

  elt->replacement = var = make_rename_temp (elt->type, "SR");
  DECL_SOURCE_LOCATION (var) = DECL_SOURCE_LOCATION (base);
  DECL_ARTIFICIAL (var) = 1;

  if (TREE_THIS_VOLATILE (elt->type))
    {
      TREE_THIS_VOLATILE (var) = 1;
      TREE_SIDE_EFFECTS (var) = 1;
    }

  if (DECL_NAME (base) && !DECL_IGNORED_P (base))
    {
      char *pretty_name = build_element_name (elt);
      DECL_NAME (var) = get_identifier (pretty_name);
      obstack_free (&sra_obstack, pretty_name);

      SET_DECL_DEBUG_EXPR (var, generate_element_ref (elt));
      DECL_DEBUG_EXPR_IS_FROM (var) = 1;

      DECL_IGNORED_P (var) = 0;
      TREE_NO_WARNING (var) = TREE_NO_WARNING (base);
    }
  else
    {
      DECL_IGNORED_P (var) = 1;
      /* ??? We can't generate any warning that would be meaningful.  */
      TREE_NO_WARNING (var) = 1;
    }

  if (dump_file)
    {
      fputs ("  ", dump_file);
      dump_sra_elt_name (dump_file, elt);
      fputs (" -> ", dump_file);
      print_generic_expr (dump_file, var, dump_flags);
      fputc ('\n', dump_file);
    }
}

/* Make one pass across an element tree deciding whether or not it's
   profitable to instantiate individual leaf scalars.

   PARENT_USES and PARENT_COPIES are the sum of the N_USES and N_COPIES
   fields all the way up the tree.  */

static void
decide_instantiation_1 (struct sra_elt *elt, unsigned int parent_uses,
                        unsigned int parent_copies)
{
  if (dump_file && !elt->parent)
    {
      fputs ("Initial instantiation for ", dump_file);
      dump_sra_elt_name (dump_file, elt);
      fputc ('\n', dump_file);
    }

  if (elt->cannot_scalarize)
    return;

  if (elt->is_scalar)
    {
      /* The decision is simple: instantiate if we're used more frequently
         than the parent needs to be seen as a complete unit.  */
      if (elt->n_uses + elt->n_copies + parent_copies > parent_uses)
        instantiate_element (elt);
    }
  else
    {
      struct sra_elt *c, *group;
      unsigned int this_uses = elt->n_uses + parent_uses;
      unsigned int this_copies = elt->n_copies + parent_copies;

      /* Consider groups of sub-elements as weighing in favour of
         instantiation whatever their size.  */
      for (group = elt->groups; group ; group = group->sibling)
        FOR_EACH_ACTUAL_CHILD (c, group)
          {
            c->n_uses += group->n_uses;
            c->n_copies += group->n_copies;
          }

      for (c = elt->children; c ; c = c->sibling)
        decide_instantiation_1 (c, this_uses, this_copies);
    }
}
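
/* Worked example (an illustrative sketch, not part of the original
   source): suppose leaf A.B.C has n_uses == 3 and n_copies == 1, and
   A and A.B together accumulate parent_uses == 2 and
   parent_copies == 0 by the time we reach C.  Then 3 + 1 + 0 > 2, so
   C is instantiated: the element's own traffic outweighs the number
   of times its parents must be treated as complete units.  */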

/* Compute the size and number of all instantiated elements below ELT.
   We will only care about this if the size of the complete structure
   fits in a HOST_WIDE_INT, so we don't have to worry about overflow.  */

static unsigned int
sum_instantiated_sizes (struct sra_elt *elt, unsigned HOST_WIDE_INT *sizep)
{
  if (elt->replacement)
    {
      *sizep += TREE_INT_CST_LOW (TYPE_SIZE_UNIT (elt->type));
      return 1;
    }
  else
    {
      struct sra_elt *c;
      unsigned int count = 0;

      for (c = elt->children; c ; c = c->sibling)
        count += sum_instantiated_sizes (c, sizep);

      return count;
    }
}

/* Instantiate fields in ELT->TYPE that are not currently present as
   children of ELT.  */

static void instantiate_missing_elements (struct sra_elt *elt);

static void
instantiate_missing_elements_1 (struct sra_elt *elt, tree child, tree type)
{
  struct sra_elt *sub = lookup_element (elt, child, type, INSERT);
  if (sub->is_scalar)
    {
      if (sub->replacement == NULL)
        instantiate_element (sub);
    }
  else
    instantiate_missing_elements (sub);
}

static void
instantiate_missing_elements (struct sra_elt *elt)
{
  tree type = elt->type;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      {
        tree f;
        for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
          if (TREE_CODE (f) == FIELD_DECL)
            {
              tree field_type = TREE_TYPE (f);

              /* canonicalize_component_ref() unwidens some bit-field
                 types (not marked as DECL_BIT_FIELD in C++), so we
                 must do the same, lest we introduce type
                 mismatches.  */
              if (INTEGRAL_TYPE_P (field_type)
                  && DECL_MODE (f) != TYPE_MODE (field_type))
                field_type = TREE_TYPE (get_unwidened (build3 (COMPONENT_REF,
                                                               field_type,
                                                               elt->element,
                                                               f, NULL_TREE),
                                                       NULL_TREE));

              instantiate_missing_elements_1 (elt, f, field_type);
            }
        break;
      }

    case ARRAY_TYPE:
      {
        tree i, max, subtype;

        i = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
        max = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
        subtype = TREE_TYPE (type);

        while (1)
          {
            instantiate_missing_elements_1 (elt, i, subtype);
            if (tree_int_cst_equal (i, max))
              break;
            i = int_const_binop (PLUS_EXPR, i, integer_one_node, true);
          }

        break;
      }

    case COMPLEX_TYPE:
      type = TREE_TYPE (type);
      instantiate_missing_elements_1 (elt, integer_zero_node, type);
      instantiate_missing_elements_1 (elt, integer_one_node, type);
      break;

    default:
      gcc_unreachable ();
    }
}

/* Make one pass across an element tree deciding whether to perform block
   or element copies.  If we decide on element copies, instantiate all
   elements.  Return true if there are any instantiated sub-elements.  */

static bool
decide_block_copy (struct sra_elt *elt)
{
  struct sra_elt *c;
  bool any_inst;

  /* We shouldn't be invoked on groups of sub-elements as they must
     behave like their parent as far as block copy is concerned.  */
  gcc_assert (!elt->is_group);

  /* If scalarization is disabled, respect it.  */
  if (elt->cannot_scalarize)
    {
      elt->use_block_copy = 1;

      if (dump_file)
        {
          fputs ("Scalarization disabled for ", dump_file);
          dump_sra_elt_name (dump_file, elt);
          fputc ('\n', dump_file);
        }

      /* Disable scalarization of sub-elements.  */
      for (c = elt->children; c; c = c->sibling)
        {
          c->cannot_scalarize = 1;
          decide_block_copy (c);
        }

      /* Groups behave like their parent.  */
      for (c = elt->groups; c; c = c->sibling)
        {
          c->cannot_scalarize = 1;
          c->use_block_copy = 1;
        }

      return false;
    }

  /* Don't decide if we've no uses.  */
  if (elt->n_uses == 0 && elt->n_copies == 0)
    ;

  else if (!elt->is_scalar)
    {
      tree size_tree = TYPE_SIZE_UNIT (elt->type);
      bool use_block_copy = true;

      /* Tradeoffs for COMPLEX types pretty much always make it better
         to go ahead and split the components.  */
      if (TREE_CODE (elt->type) == COMPLEX_TYPE)
        use_block_copy = false;

      /* Don't bother trying to figure out the rest if the structure is
         so large we can't do easy arithmetic.  This also forces block
         copies for variable sized structures.  */
      else if (host_integerp (size_tree, 1))
        {
          unsigned HOST_WIDE_INT full_size, inst_size = 0;
          unsigned int max_size, max_count, inst_count, full_count;

          /* If the sra-max-structure-size parameter is 0, then the
             user has not overridden the parameter and we can choose a
             sensible default.  */
          max_size = SRA_MAX_STRUCTURE_SIZE
            ? SRA_MAX_STRUCTURE_SIZE
            : MOVE_RATIO * UNITS_PER_WORD;
          max_count = SRA_MAX_STRUCTURE_COUNT
            ? SRA_MAX_STRUCTURE_COUNT
            : MOVE_RATIO;

          full_size = tree_low_cst (size_tree, 1);
          full_count = count_type_elements (elt->type, false);
          inst_count = sum_instantiated_sizes (elt, &inst_size);

          /* ??? What to do here.  If there are two fields, and we've only
             instantiated one, then instantiating the other is clearly a win.
             If there are a large number of fields then the size of the copy
             is much more of a factor.  */

          /* If the structure is small, and we've made copies, go ahead
             and instantiate, hoping that the copies will go away.  */
          if (full_size <= max_size
              && (full_count - inst_count) <= max_count
              && elt->n_copies > elt->n_uses)
            use_block_copy = false;
          else if (inst_count * 100 >= full_count * SRA_FIELD_STRUCTURE_RATIO
                   && inst_size * 100 >= full_size * SRA_FIELD_STRUCTURE_RATIO)
            use_block_copy = false;

          /* In order to avoid block copy, we have to be able to instantiate
             all elements of the type.  See if this is possible.  */
          if (!use_block_copy
              && (!can_completely_scalarize_p (elt)
                  || !type_can_instantiate_all_elements (elt->type)))
            use_block_copy = true;
        }

      elt->use_block_copy = use_block_copy;

      /* Groups behave like their parent.  */
      for (c = elt->groups; c; c = c->sibling)
        c->use_block_copy = use_block_copy;

      if (dump_file)
        {
          fprintf (dump_file, "Using %s for ",
                   use_block_copy ? "block-copy" : "element-copy");
          dump_sra_elt_name (dump_file, elt);
          fputc ('\n', dump_file);
        }

      if (!use_block_copy)
        {
          instantiate_missing_elements (elt);
          return true;
        }
    }

  any_inst = elt->replacement != NULL;

  for (c = elt->children; c ; c = c->sibling)
    any_inst |= decide_block_copy (c);

  return any_inst;
}
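
/* Worked example (an illustrative sketch, not part of the original
   source): suppose MOVE_RATIO == 4 and UNITS_PER_WORD == 4, so the
   defaults are max_size == 16 bytes and max_count == 4 elements.  A
   12-byte struct with three int fields that is copied more often
   than it is used (n_copies > n_uses) passes both limits, so
   use_block_copy is cleared and the missing fields are instantiated;
   a 64-byte struct would instead fall through to the
   SRA_FIELD_STRUCTURE_RATIO test or stay a block copy.  */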
1501
 
1502
/* Entry point to phase 3.  Instantiate scalar replacement variables.  */
1503
 
1504
static void
1505
decide_instantiations (void)
1506
{
1507
  unsigned int i;
1508
  bool cleared_any;
1509
  bitmap_head done_head;
1510
  bitmap_iterator bi;
1511
 
1512
  /* We cannot clear bits from a bitmap we're iterating over,
1513
     so save up all the bits to clear until the end.  */
1514
  bitmap_initialize (&done_head, &bitmap_default_obstack);
1515
  cleared_any = false;
1516
 
1517
  EXECUTE_IF_SET_IN_BITMAP (sra_candidates, 0, i, bi)
1518
    {
1519
      tree var = referenced_var (i);
1520
      struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
1521
      if (elt)
1522
        {
1523
          decide_instantiation_1 (elt, 0, 0);
1524
          if (!decide_block_copy (elt))
1525
            elt = NULL;
1526
        }
1527
      if (!elt)
1528
        {
1529
          bitmap_set_bit (&done_head, i);
1530
          cleared_any = true;
1531
        }
1532
    }
1533
 
1534
  if (cleared_any)
1535
    {
1536
      bitmap_and_compl_into (sra_candidates, &done_head);
1537
      bitmap_and_compl_into (needs_copy_in, &done_head);
1538
    }
1539
  bitmap_clear (&done_head);
1540
 
1541
  if (!bitmap_empty_p (sra_candidates))
1542
    todoflags |= TODO_update_smt_usage;
1543
 
1544
  mark_set_for_renaming (sra_candidates);
1545
 
1546
  if (dump_file)
1547
    fputc ('\n', dump_file);
1548
}
1549
 
1550
 
1551
/* Phase Four: Update the function to match the replacements created.  */
1552
 
1553
/* Mark all the variables in V_MAY_DEF or V_MUST_DEF operands for STMT for
1554
   renaming. This becomes necessary when we modify all of a non-scalar.  */
1555
 
1556
static void
1557
mark_all_v_defs_1 (tree stmt)
1558
{
1559
  tree sym;
1560
  ssa_op_iter iter;
1561
 
1562
  update_stmt_if_modified (stmt);
1563
 
1564
  FOR_EACH_SSA_TREE_OPERAND (sym, stmt, iter, SSA_OP_ALL_VIRTUALS)
1565
    {
1566
      if (TREE_CODE (sym) == SSA_NAME)
1567
        sym = SSA_NAME_VAR (sym);
1568
      mark_sym_for_renaming (sym);
1569
    }
1570
}
1571
 
1572
 
1573
/* Mark all the variables in virtual operands in all the statements in
1574
   LIST for renaming.  */
1575
 
1576
static void
1577
mark_all_v_defs (tree list)
1578
{
1579
  if (TREE_CODE (list) != STATEMENT_LIST)
1580
    mark_all_v_defs_1 (list);
1581
  else
1582
    {
1583
      tree_stmt_iterator i;
1584
      for (i = tsi_start (list); !tsi_end_p (i); tsi_next (&i))
1585
        mark_all_v_defs_1 (tsi_stmt (i));
1586
    }
1587
}
1588
 
1589
/* Mark every replacement under ELT with TREE_NO_WARNING.  */
1590
 
1591
static void
1592
mark_no_warning (struct sra_elt *elt)
1593
{
1594
  if (!elt->all_no_warning)
1595
    {
1596
      if (elt->replacement)
1597
        TREE_NO_WARNING (elt->replacement) = 1;
1598
      else
1599
        {
1600
          struct sra_elt *c;
1601
          FOR_EACH_ACTUAL_CHILD (c, elt)
1602
            mark_no_warning (c);
1603
        }
1604
      elt->all_no_warning = true;
1605
    }
1606
}
1607
 
1608
/* Build a single level component reference to ELT rooted at BASE.  */
1609
 
1610
static tree
1611
generate_one_element_ref (struct sra_elt *elt, tree base)
1612
{
1613
  switch (TREE_CODE (TREE_TYPE (base)))
1614
    {
1615
    case RECORD_TYPE:
1616
      {
1617
        tree field = elt->element;
1618
 
1619
        /* Watch out for compatible records with differing field lists.  */
1620
        if (DECL_FIELD_CONTEXT (field) != TYPE_MAIN_VARIANT (TREE_TYPE (base)))
1621
          field = find_compatible_field (TREE_TYPE (base), field);
1622
 
1623
        return build3 (COMPONENT_REF, elt->type, base, field, NULL);
1624
      }
1625
 
1626
    case ARRAY_TYPE:
1627
      todoflags |= TODO_update_smt_usage;
1628
      if (TREE_CODE (elt->element) == RANGE_EXPR)
1629
        return build4 (ARRAY_RANGE_REF, elt->type, base,
1630
                       TREE_OPERAND (elt->element, 0), NULL, NULL);
1631
      else
1632
        return build4 (ARRAY_REF, elt->type, base, elt->element, NULL, NULL);
1633
 
1634
    case COMPLEX_TYPE:
1635
      if (elt->element == integer_zero_node)
1636
        return build1 (REALPART_EXPR, elt->type, base);
1637
      else
1638
        return build1 (IMAGPART_EXPR, elt->type, base);
1639
 
1640
    default:
1641
      gcc_unreachable ();
1642
    }
1643
}
1644
 
1645
/* Build a full component reference to ELT rooted at its native variable.  */
1646
 
1647
static tree
1648
generate_element_ref (struct sra_elt *elt)
1649
{
1650
  if (elt->parent)
1651
    return generate_one_element_ref (elt, generate_element_ref (elt->parent));
1652
  else
1653
    return elt->element;
1654
}

static tree
sra_build_assignment (tree dst, tree src)
{
  /* We need TYPE_CANONICAL to compare the types of dst and src
     efficiently, but that's only introduced in GCC 4.3.  */
  return build2 (MODIFY_EXPR, void_type_node, dst, src);
}

/* Generate a set of assignment statements in *LIST_P to copy all
   instantiated elements under ELT to or from the equivalent structure
   rooted at EXPR.  COPY_OUT controls the direction of the copy, with
   true meaning to copy out of EXPR into ELT.  */

static void
generate_copy_inout (struct sra_elt *elt, bool copy_out, tree expr,
                     tree *list_p)
{
  struct sra_elt *c;
  tree t;

  if (!copy_out && TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == COMPLEX_TYPE)
    {
      tree r, i;

      c = lookup_element (elt, integer_zero_node, NULL, NO_INSERT);
      r = c->replacement;
      c = lookup_element (elt, integer_one_node, NULL, NO_INSERT);
      i = c->replacement;

      t = build2 (COMPLEX_EXPR, elt->type, r, i);
      t = sra_build_assignment (expr, t);
      SSA_NAME_DEF_STMT (expr) = t;
      append_to_statement_list (t, list_p);
    }
  else if (elt->replacement)
    {
      if (copy_out)
        t = sra_build_assignment (elt->replacement, expr);
      else
        t = sra_build_assignment (expr, elt->replacement);
      append_to_statement_list (t, list_p);
    }
  else
    {
      FOR_EACH_ACTUAL_CHILD (c, elt)
        {
          t = generate_one_element_ref (c, unshare_expr (expr));
          generate_copy_inout (c, copy_out, t, list_p);
        }
    }
}
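
/* Illustrative note, not part of the original source: for

     struct pt { int x, y; } p;

   with replacement variables (call them p$x and p$y here), a
   copy-out emits "p$x = p.x; p$y = p.y;" while a copy-in emits the
   reverse assignments.  */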

/* Generate a set of assignment statements in *LIST_P to copy all instantiated
   elements under SRC to their counterparts under DST.  There must be a 1-1
   correspondence of instantiated elements.  */

static void
generate_element_copy (struct sra_elt *dst, struct sra_elt *src, tree *list_p)
{
  struct sra_elt *dc, *sc;

  FOR_EACH_ACTUAL_CHILD (dc, dst)
    {
      sc = lookup_element (src, dc->element, NULL, NO_INSERT);
      gcc_assert (sc);
      generate_element_copy (dc, sc, list_p);
    }

  if (dst->replacement)
    {
      tree t;

      gcc_assert (src->replacement);

      t = sra_build_assignment (dst->replacement, src->replacement);
      append_to_statement_list (t, list_p);
    }
}

/* Generate a set of assignment statements in *LIST_P to zero all instantiated
   elements under ELT.  In addition, do not assign to elements that have been
   marked VISITED but do reset the visited flag; this allows easy coordination
   with generate_element_init.  */

static void
generate_element_zero (struct sra_elt *elt, tree *list_p)
{
  struct sra_elt *c;

  if (elt->visited)
    {
      elt->visited = false;
      return;
    }

  FOR_EACH_ACTUAL_CHILD (c, elt)
    generate_element_zero (c, list_p);

  if (elt->replacement)
    {
      tree t;

      gcc_assert (elt->is_scalar);
      t = fold_convert (elt->type, integer_zero_node);

      t = sra_build_assignment (elt->replacement, t);
      append_to_statement_list (t, list_p);
    }
}
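
/* Illustrative note, not part of the original source: for
   "struct pt p = { 1 };" generate_element_init marks the element for
   'x' VISITED after emitting its assignment, so this routine only
   zeroes the replacement for 'y' and resets the flag on 'x'.  */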

/* Generate an assignment VAR = INIT, where INIT may need gimplification.
   Add the result to *LIST_P.  */

static void
generate_one_element_init (tree var, tree init, tree *list_p)
{
  /* The replacement can be almost arbitrarily complex.  Gimplify.  */
  tree stmt = sra_build_assignment (var, init);
  gimplify_and_add (stmt, list_p);
}

/* Generate a set of assignment statements in *LIST_P to set all instantiated
   elements under ELT with the contents of the initializer INIT.  In addition,
   mark all assigned elements VISITED; this allows easy coordination with
   generate_element_zero.  Return false if we found a case we couldn't
   handle.  */

static bool
generate_element_init_1 (struct sra_elt *elt, tree init, tree *list_p)
{
  bool result = true;
  enum tree_code init_code;
  struct sra_elt *sub;
  tree t;
  unsigned HOST_WIDE_INT idx;
  tree value, purpose;

  /* We can be passed DECL_INITIAL of a static variable.  It might have a
     conversion, which we strip off here.  */
  STRIP_USELESS_TYPE_CONVERSION (init);
  init_code = TREE_CODE (init);

  if (elt->is_scalar)
    {
      if (elt->replacement)
        {
          generate_one_element_init (elt->replacement, init, list_p);
          elt->visited = true;
        }
      return result;
    }

  switch (init_code)
    {
    case COMPLEX_CST:
    case COMPLEX_EXPR:
      FOR_EACH_ACTUAL_CHILD (sub, elt)
        {
          if (sub->element == integer_zero_node)
            t = (init_code == COMPLEX_EXPR
                 ? TREE_OPERAND (init, 0) : TREE_REALPART (init));
          else
            t = (init_code == COMPLEX_EXPR
                 ? TREE_OPERAND (init, 1) : TREE_IMAGPART (init));
          result &= generate_element_init_1 (sub, t, list_p);
        }
      break;

    case CONSTRUCTOR:
      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), idx, purpose, value)
        {
          if (TREE_CODE (purpose) == RANGE_EXPR)
            {
              tree lower = TREE_OPERAND (purpose, 0);
              tree upper = TREE_OPERAND (purpose, 1);

              while (1)
                {
                  sub = lookup_element (elt, lower, NULL, NO_INSERT);
                  if (sub != NULL)
                    result &= generate_element_init_1 (sub, value, list_p);
                  if (tree_int_cst_equal (lower, upper))
                    break;
                  lower = int_const_binop (PLUS_EXPR, lower,
                                           integer_one_node, true);
                }
            }
          else
            {
              sub = lookup_element (elt, purpose, NULL, NO_INSERT);
              if (sub != NULL)
                result &= generate_element_init_1 (sub, value, list_p);
            }
        }
      break;

    default:
      elt->visited = true;
      result = false;
    }

  return result;
}
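
/* Illustrative note, not part of the original source: a RANGE_EXPR
   designator such as "int a[4] = { [1 ... 2] = 5 };" is handled by
   stepping LOWER up to UPPER one index at a time, initializing each
   instantiated element in the range with the same VALUE.  */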

/* A wrapper function for generate_element_init_1 that handles cleanup after
   gimplification.  */

static bool
generate_element_init (struct sra_elt *elt, tree init, tree *list_p)
{
  bool ret;

  push_gimplify_context ();
  ret = generate_element_init_1 (elt, init, list_p);
  pop_gimplify_context (NULL);

  /* The replacement can expose previously unreferenced variables.  */
  if (ret && *list_p)
    {
      tree_stmt_iterator i;

      for (i = tsi_start (*list_p); !tsi_end_p (i); tsi_next (&i))
        find_new_referenced_vars (tsi_stmt_ptr (i));
    }

  return ret;
}

/* Insert STMT on all the outgoing edges out of BB.  Note that if BB
   has more than one edge, STMT will be replicated for each edge.  Also,
   abnormal edges will be ignored.  */

void
insert_edge_copies (tree stmt, basic_block bb)
{
  edge e;
  edge_iterator ei;
  bool first_copy;

  first_copy = true;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      /* We don't need to insert copies on abnormal edges.  The
         value of the scalar replacement is not guaranteed to
         be valid through an abnormal edge.  */
      if (!(e->flags & EDGE_ABNORMAL))
        {
          if (first_copy)
            {
              bsi_insert_on_edge (e, stmt);
              first_copy = false;
            }
          else
            bsi_insert_on_edge (e, unsave_expr_now (stmt));
        }
    }
}
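
/* Illustrative note, not part of the original source: the first
   normal edge receives STMT itself; each additional edge receives an
   unshared copy via unsave_expr_now, since the same statement tree
   must not be linked into the IL twice.  */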

/* Helper function to insert LIST before BSI, and set up line number info.  */

void
sra_insert_before (block_stmt_iterator *bsi, tree list)
{
  tree stmt = bsi_stmt (*bsi);

  if (EXPR_HAS_LOCATION (stmt))
    annotate_all_with_locus (&list, EXPR_LOCATION (stmt));
  bsi_insert_before (bsi, list, BSI_SAME_STMT);
}

/* Similarly, but insert after BSI.  Handles insertion onto edges as well.  */

void
sra_insert_after (block_stmt_iterator *bsi, tree list)
{
  tree stmt = bsi_stmt (*bsi);

  if (EXPR_HAS_LOCATION (stmt))
    annotate_all_with_locus (&list, EXPR_LOCATION (stmt));

  if (stmt_ends_bb_p (stmt))
    insert_edge_copies (list, bsi->bb);
  else
    bsi_insert_after (bsi, list, BSI_SAME_STMT);
}

/* Similarly, but replace the statement at BSI.  */

static void
sra_replace (block_stmt_iterator *bsi, tree list)
{
  sra_insert_before (bsi, list);
  bsi_remove (bsi, false);
  if (bsi_end_p (*bsi))
    *bsi = bsi_last (bsi->bb);
  else
    bsi_prev (bsi);
}
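
/* Illustrative note, not part of the original source: after the old
   statement is removed, the iterator is backed up onto the last
   inserted statement, so a caller advancing with bsi_next continues
   with the statement that followed the one replaced.  */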

/* Scalarize a USE.  To recap, this is either a simple reference to ELT,
   if elt is scalar, or some occurrence of ELT that requires a complete
   aggregate.  IS_OUTPUT is true if ELT is being modified.  */

static void
scalarize_use (struct sra_elt *elt, tree *expr_p, block_stmt_iterator *bsi,
               bool is_output, bool use_all)
{
  tree list = NULL, stmt = bsi_stmt (*bsi);

  if (elt->replacement)
    {
      /* If we have a replacement, then updating the reference is as
         simple as modifying the existing statement in place.  */
      if (is_output)
        mark_all_v_defs (stmt);
      *expr_p = elt->replacement;
      update_stmt (stmt);
    }
  else
    {
      /* Otherwise we need some copies.  If ELT is being read, then we want
         to store all (modified) sub-elements back into the structure before
         the reference takes place.  If ELT is being written, then we want to
         load the changed values back into our shadow variables.  */
      /* ??? We don't check modified for reads, we just always write all of
         the values.  We should be able to record the SSA number of the VOP
         for which the values were last read.  If that number matches the
         SSA number of the VOP in the current statement, then we needn't
         emit an assignment.  This would also eliminate double writes when
         a structure is passed as more than one argument to a function call.
         This optimization would be most effective if sra_walk_function
         processed the blocks in dominator order.  */

      generate_copy_inout (elt, is_output, generate_element_ref (elt), &list);
      if (list == NULL)
        return;
      mark_all_v_defs (list);
      if (is_output)
        sra_insert_after (bsi, list);
      else
        {
          sra_insert_before (bsi, list);
          if (use_all)
            mark_no_warning (elt);
        }
    }
}
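
/* Illustrative note, not part of the original source: for a call
   "foo (p)" that needs all of a partially scalarized p, the modified
   sub-element values are stored back first, e.g. "p.x = p$x;
   p.y = p$y;" (with p$x and p$y standing for the replacements), so
   the callee sees a consistent aggregate.  */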

/* Scalarize a COPY.  To recap, this is an assignment statement between
   two scalarizable references, LHS_ELT and RHS_ELT.  */

static void
scalarize_copy (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
                block_stmt_iterator *bsi)
{
  tree list, stmt;

  if (lhs_elt->replacement && rhs_elt->replacement)
    {
      /* If we have two scalar operands, modify the existing statement.  */
      stmt = bsi_stmt (*bsi);

      /* See the commentary in sra_walk_function concerning
         RETURN_EXPR, and why we should never see one here.  */
      gcc_assert (TREE_CODE (stmt) == MODIFY_EXPR);

      TREE_OPERAND (stmt, 0) = lhs_elt->replacement;
      TREE_OPERAND (stmt, 1) = rhs_elt->replacement;
      update_stmt (stmt);
    }
  else if (lhs_elt->use_block_copy || rhs_elt->use_block_copy)
    {
      /* If either side requires a block copy, then sync the RHS back
         to the original structure, leave the original assignment
         statement (which will perform the block copy), then load the
         LHS values out of its now-updated original structure.  */
      /* ??? Could perform a modified pair-wise element copy.  That
         would at least allow those elements that are instantiated in
         both structures to be optimized well.  */

      list = NULL;
      generate_copy_inout (rhs_elt, false,
                           generate_element_ref (rhs_elt), &list);
      if (list)
        {
          mark_all_v_defs (list);
          sra_insert_before (bsi, list);
        }

      list = NULL;
      generate_copy_inout (lhs_elt, true,
                           generate_element_ref (lhs_elt), &list);
      if (list)
        {
          mark_all_v_defs (list);
          sra_insert_after (bsi, list);
        }
    }
  else
    {
      /* Otherwise both sides must be fully instantiated.  In which
         case perform pair-wise element assignments and replace the
         original block copy statement.  */

      stmt = bsi_stmt (*bsi);
      mark_all_v_defs (stmt);

      list = NULL;
      generate_element_copy (lhs_elt, rhs_elt, &list);
      gcc_assert (list);
      mark_all_v_defs (list);
      sra_replace (bsi, list);
    }
}
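
/* Illustrative note, not part of the original source: when both sides
   are fully instantiated, the block copy "a = b;" is replaced by the
   pair-wise assignments "a$x = b$x; a$y = b$y;" (using $-names for
   the replacements), so no aggregate copy remains.  */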

/* Scalarize an INIT.  To recap, this is an assignment to a scalarizable
   reference from some form of constructor: CONSTRUCTOR, COMPLEX_CST or
   COMPLEX_EXPR.  If RHS is NULL, it should be treated as an empty
   CONSTRUCTOR.  */

static void
scalarize_init (struct sra_elt *lhs_elt, tree rhs, block_stmt_iterator *bsi)
{
  bool result = true;
  tree list = NULL;

  /* Generate initialization statements for all members extant in the RHS.  */
  if (rhs)
    {
      /* Unshare the expression just in case this is from a decl's initial.  */
      rhs = unshare_expr (rhs);
      result = generate_element_init (lhs_elt, rhs, &list);
    }

  /* CONSTRUCTOR is defined such that any member not mentioned is assigned
     a zero value.  Initialize the rest of the instantiated elements.  */
  generate_element_zero (lhs_elt, &list);

  if (!result)
    {
      /* If we failed to convert the entire initializer, then we must
         leave the structure assignment in place and must load values
         from the structure into the slots for which we did not find
         constants.  The easiest way to do this is to generate a complete
         copy-out, and then follow that with the constant assignments
         that we were able to build.  DCE will clean things up.  */
      tree list0 = NULL;
      generate_copy_inout (lhs_elt, true, generate_element_ref (lhs_elt),
                           &list0);
      append_to_statement_list (list, &list0);
      list = list0;
    }

  if (lhs_elt->use_block_copy || !result)
    {
      /* Since LHS is not fully instantiated, we must leave the structure
         assignment in place.  Treating this case differently from a USE
         exposes constants to later optimizations.  */
      if (list)
        {
          mark_all_v_defs (list);
          sra_insert_after (bsi, list);
        }
    }
  else
    {
      /* The LHS is fully instantiated.  The list of initializations
         replaces the original structure assignment.  */
      gcc_assert (list);
      mark_all_v_defs (bsi_stmt (*bsi));
      mark_all_v_defs (list);
      sra_replace (bsi, list);
    }
}
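
/* Illustrative note, not part of the original source: for a fully
   instantiated LHS, "struct pt p = { 1 };" becomes "p$x = 1;
   p$y = 0;", with generate_element_zero supplying the implicit zero
   for the member the constructor leaves unmentioned.  */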

/* A subroutine of scalarize_ldst called via walk_tree.  Set TREE_THIS_NOTRAP
   on all INDIRECT_REFs.  */

static tree
mark_notrap (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  if (TREE_CODE (t) == INDIRECT_REF)
    {
      TREE_THIS_NOTRAP (t) = 1;
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;

  return NULL;
}

/* Scalarize a LDST.  To recap, this is an assignment between one scalarizable
   reference ELT and one non-scalarizable reference OTHER.  IS_OUTPUT is true
   if ELT is on the left-hand side.  */

static void
scalarize_ldst (struct sra_elt *elt, tree other,
                block_stmt_iterator *bsi, bool is_output)
{
  /* Shouldn't have gotten called for a scalar.  */
  gcc_assert (!elt->replacement);

  if (elt->use_block_copy)
    {
      /* Since ELT is not fully instantiated, we have to leave the
         block copy in place.  Treat this as a USE.  */
      scalarize_use (elt, NULL, bsi, is_output, false);
    }
  else
    {
      /* The interesting case is when ELT is fully instantiated.  In this
         case we can have each element stored/loaded directly to/from the
         corresponding slot in OTHER.  This avoids a block copy.  */

      tree list = NULL, stmt = bsi_stmt (*bsi);

      mark_all_v_defs (stmt);
      generate_copy_inout (elt, is_output, other, &list);
      mark_all_v_defs (list);
      gcc_assert (list);

      /* Preserve EH semantics.  */
      if (stmt_ends_bb_p (stmt))
        {
          tree_stmt_iterator tsi;
          tree first;

          /* Extract the first statement from LIST.  */
          tsi = tsi_start (list);
          first = tsi_stmt (tsi);
          tsi_delink (&tsi);

          /* Replace the old statement with this new representative.  */
          bsi_replace (bsi, first, true);

          if (!tsi_end_p (tsi))
            {
              /* If any reference would trap, then they all would.  And more
                 to the point, the first would.  Therefore none of the rest
                 will trap since the first didn't.  Indicate this by
                 iterating over the remaining statements and setting
                 TREE_THIS_NOTRAP in all INDIRECT_REFs.  */
              do
                {
                  walk_tree (tsi_stmt_ptr (tsi), mark_notrap, NULL, NULL);
                  tsi_next (&tsi);
                }
              while (!tsi_end_p (tsi));

              insert_edge_copies (list, bsi->bb);
            }
        }
      else
        sra_replace (bsi, list);
    }
}
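
/* Illustrative note, not part of the original source: for "p = *q;"
   with p fully instantiated, the block load becomes the per-element
   loads "p$x = q->x; p$y = q->y;".  If the original statement could
   throw, only the first load keeps its place at the end of the block;
   the remaining loads are marked non-trapping and inserted on the
   outgoing edges.  */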

/* Generate initializations for all scalarizable parameters.  */

static void
scalarize_parms (void)
{
  tree list = NULL;
  unsigned i;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (needs_copy_in, 0, i, bi)
    {
      tree var = referenced_var (i);
      struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
      generate_copy_inout (elt, true, var, &list);
    }

  if (list)
    {
      insert_edge_copies (list, ENTRY_BLOCK_PTR);
      mark_all_v_defs (list);
    }
}
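
/* Illustrative note, not part of the original source: parameters
   arrive in aggregate form, so the generated copy-in statements
   (e.g. "p$x = p.x;") are queued on the edge leaving the entry
   block, ahead of any use of the replacements; they are committed
   by bsi_commit_edge_inserts in scalarize_function.  */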

/* Entry point to phase 4.  Update the function to match replacements.  */

static void
scalarize_function (void)
{
  static const struct sra_walk_fns fns = {
    scalarize_use, scalarize_copy, scalarize_init, scalarize_ldst, false
  };

  sra_walk_function (&fns);
  scalarize_parms ();
  bsi_commit_edge_inserts ();
}


/* Debug helper function.  Print ELT in a nice human-readable format.  */

static void
dump_sra_elt_name (FILE *f, struct sra_elt *elt)
{
  if (elt->parent && TREE_CODE (elt->parent->type) == COMPLEX_TYPE)
    {
      fputs (elt->element == integer_zero_node ? "__real__ " : "__imag__ ", f);
      dump_sra_elt_name (f, elt->parent);
    }
  else
    {
      if (elt->parent)
        dump_sra_elt_name (f, elt->parent);
      if (DECL_P (elt->element))
        {
          if (TREE_CODE (elt->element) == FIELD_DECL)
            fputc ('.', f);
          print_generic_expr (f, elt->element, dump_flags);
        }
      else if (TREE_CODE (elt->element) == RANGE_EXPR)
        fprintf (f, "[" HOST_WIDE_INT_PRINT_DEC ".." HOST_WIDE_INT_PRINT_DEC "]",
                 TREE_INT_CST_LOW (TREE_OPERAND (elt->element, 0)),
                 TREE_INT_CST_LOW (TREE_OPERAND (elt->element, 1)));
      else
        fprintf (f, "[" HOST_WIDE_INT_PRINT_DEC "]",
                 TREE_INT_CST_LOW (elt->element));
    }
}

/* Likewise, but callable from the debugger.  */

void
debug_sra_elt_name (struct sra_elt *elt)
{
  dump_sra_elt_name (stderr, elt);
  fputc ('\n', stderr);
}
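
/* Illustrative note, not part of the original source: typical output
   is "p.x" for a field element, "a[3]" or "a[1..2]" for array
   elements, and "__real__ c" / "__imag__ c" for the halves of a
   complex variable.  */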

void
sra_init_cache (void)
{
  if (sra_type_decomp_cache)
    return;

  sra_type_decomp_cache = BITMAP_ALLOC (NULL);
  sra_type_inst_cache = BITMAP_ALLOC (NULL);
}

/* Main entry point.  */

static unsigned int
tree_sra (void)
{
  /* Initialize local variables.  */
  todoflags = 0;
  gcc_obstack_init (&sra_obstack);
  sra_candidates = BITMAP_ALLOC (NULL);
  needs_copy_in = BITMAP_ALLOC (NULL);
  sra_init_cache ();
  sra_map = htab_create (101, sra_elt_hash, sra_elt_eq, NULL);

  /* Scan.  If we find anything, instantiate and scalarize.  */
  if (find_candidates_for_sra ())
    {
      scan_function ();
      decide_instantiations ();
      scalarize_function ();
    }

  /* Free allocated memory.  */
  htab_delete (sra_map);
  sra_map = NULL;
  BITMAP_FREE (sra_candidates);
  BITMAP_FREE (needs_copy_in);
  BITMAP_FREE (sra_type_decomp_cache);
  BITMAP_FREE (sra_type_inst_cache);
  obstack_free (&sra_obstack, NULL);
  return todoflags;
}

static bool
gate_sra (void)
{
  return flag_tree_sra != 0;
}

struct tree_opt_pass pass_sra =
{
  "sra",                                /* name */
  gate_sra,                             /* gate */
  tree_sra,                             /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_SRA,                          /* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,     /* properties_required */
  0,                                    /* properties_provided */
  PROP_smt_usage,                       /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func /* todo_flags_finish */
  | TODO_update_ssa
  | TODO_ggc_collect | TODO_verify_ssa,
  0                                     /* letter */
};
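
/* Illustrative note, not part of the original source: the fixed
   todo_flags_finish above always requests dumping, SSA update,
   garbage collection and SSA verification, while tree_sra
   additionally ORs TODO_update_smt_usage into the returned flags
   when generate_one_element_ref rebuilds an array reference.  */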
