openrisc/trunk/gnu-dev/or1k-gcc/gcc/cfgexpand.c, blame listing for rev 684 (author jeremybenn), from https://opencores.org/ocsvn/openrisc/openrisc/trunk

/* A pass for lowering trees to RTL.
2
   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
3
   Free Software Foundation, Inc.
4
 
5
This file is part of GCC.
6
 
7
GCC is free software; you can redistribute it and/or modify
8
it under the terms of the GNU General Public License as published by
9
the Free Software Foundation; either version 3, or (at your option)
10
any later version.
11
 
12
GCC is distributed in the hope that it will be useful,
13
but WITHOUT ANY WARRANTY; without even the implied warranty of
14
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15
GNU General Public License for more details.
16
 
17
You should have received a copy of the GNU General Public License
18
along with GCC; see the file COPYING3.  If not see
19
<http://www.gnu.org/licenses/>.  */
20
 
21
#include "config.h"
22
#include "system.h"
23
#include "coretypes.h"
24
#include "tm.h"
25
#include "tree.h"
26
#include "rtl.h"
27
#include "tm_p.h"
28
#include "basic-block.h"
29
#include "function.h"
30
#include "expr.h"
31
#include "langhooks.h"
32
#include "tree-flow.h"
33
#include "timevar.h"
34
#include "tree-dump.h"
35
#include "tree-pass.h"
36
#include "except.h"
37
#include "flags.h"
38
#include "diagnostic.h"
39
#include "tree-pretty-print.h"
40
#include "gimple-pretty-print.h"
41
#include "toplev.h"
42
#include "debug.h"
43
#include "params.h"
44
#include "tree-inline.h"
45
#include "value-prof.h"
46
#include "target.h"
47
#include "ssaexpand.h"
48
#include "bitmap.h"
49
#include "sbitmap.h"
50
#include "insn-attr.h" /* For INSN_SCHEDULING.  */
51
 
52
/* This variable holds information helping the rewriting of SSA trees
53
   into RTL.  */
54
struct ssaexpand SA;
55
 
56
/* This variable holds the currently expanded gimple statement for purposes
57
   of communicating the profile info to the builtin expanders.  */
58
gimple currently_expanding_gimple_stmt;
59
 
60
static rtx expand_debug_expr (tree);
61
 
62
/* Return an expression tree corresponding to the RHS of GIMPLE
63
   statement STMT.  */
64
 
65
tree
66
gimple_assign_rhs_to_tree (gimple stmt)
67
{
68
  tree t;
69
  enum gimple_rhs_class grhs_class;
70
 
71
  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));
72
 
73
  if (grhs_class == GIMPLE_TERNARY_RHS)
74
    t = build3 (gimple_assign_rhs_code (stmt),
75
                TREE_TYPE (gimple_assign_lhs (stmt)),
76
                gimple_assign_rhs1 (stmt),
77
                gimple_assign_rhs2 (stmt),
78
                gimple_assign_rhs3 (stmt));
79
  else if (grhs_class == GIMPLE_BINARY_RHS)
80
    t = build2 (gimple_assign_rhs_code (stmt),
81
                TREE_TYPE (gimple_assign_lhs (stmt)),
82
                gimple_assign_rhs1 (stmt),
83
                gimple_assign_rhs2 (stmt));
84
  else if (grhs_class == GIMPLE_UNARY_RHS)
85
    t = build1 (gimple_assign_rhs_code (stmt),
86
                TREE_TYPE (gimple_assign_lhs (stmt)),
87
                gimple_assign_rhs1 (stmt));
88
  else if (grhs_class == GIMPLE_SINGLE_RHS)
89
    {
90
      t = gimple_assign_rhs1 (stmt);
91
      /* Avoid modifying this tree in place below.  */
92
      if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
93
           && gimple_location (stmt) != EXPR_LOCATION (t))
94
          || (gimple_block (stmt)
95
              && currently_expanding_to_rtl
96
              && EXPR_P (t)
97
              && gimple_block (stmt) != TREE_BLOCK (t)))
98
        t = copy_node (t);
99
    }
100
  else
101
    gcc_unreachable ();
102
 
103
  if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
104
    SET_EXPR_LOCATION (t, gimple_location (stmt));
105
  if (gimple_block (stmt) && currently_expanding_to_rtl && EXPR_P (t))
106
    TREE_BLOCK (t) = gimple_block (stmt);
107
 
108
  return t;
109
}
110
 
111
 
112
#ifndef STACK_ALIGNMENT_NEEDED
113
#define STACK_ALIGNMENT_NEEDED 1
114
#endif
115
 
116
#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
117
 
118
/* Associate declaration T with storage space X.  If T is not an
119
   SSA name this is exactly SET_DECL_RTL, otherwise make the
120
   partition of T associated with X.  */
121
static inline void
122
set_rtl (tree t, rtx x)
123
{
124
  if (TREE_CODE (t) == SSA_NAME)
125
    {
126
      SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
127
      if (x && !MEM_P (x))
128
        set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
129
      /* For the benefit of debug information at -O0 (where vartracking
130
         doesn't run) also record the place in the base DECL if it's
131
         a normal variable (not a parameter).  */
132
      if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
133
        {
134
          tree var = SSA_NAME_VAR (t);
135
          /* If we don't yet have something recorded, just record it now.  */
136
          if (!DECL_RTL_SET_P (var))
137
            SET_DECL_RTL (var, x);
138
          /* If we have it set already to "multiple places" don't
139
             change this.  */
140
          else if (DECL_RTL (var) == pc_rtx)
141
            ;
142
          /* If we have something recorded and it's not the same place
143
             as we want to record now, we have multiple partitions for the
144
             same base variable, with different places.  We can't just
145
             randomly choose one, hence we have to say that we don't know.
146
             This only happens with optimization, and there var-tracking
147
             will figure out the right thing.  */
148
          else if (DECL_RTL (var) != x)
149
            SET_DECL_RTL (var, pc_rtx);
150
        }
151
    }
152
  else
153
    SET_DECL_RTL (t, x);
154
}
155
 
156
/* This structure holds data relevant to one variable that will be
157
   placed in a stack slot.  */
158
struct stack_var
159
{
160
  /* The Variable.  */
161
  tree decl;
162
 
163
  /* Initially, the size of the variable.  Later, the size of the partition,
164
     if this variable becomes its partition's representative.  */
165
  HOST_WIDE_INT size;
166
 
167
  /* The *byte* alignment required for this variable.  Or, as with the
168
     size, the alignment for this partition.  */
169
  unsigned int alignb;
170
 
171
  /* The partition representative.  */
172
  size_t representative;
173
 
174
  /* The next stack variable in the partition, or EOC.  */
175
  size_t next;
176
 
177
  /* The numbers of conflicting stack variables.  */
178
  bitmap conflicts;
179
};
180
 
181
#define EOC  ((size_t)-1)
182
 
183
/* We have an array of such objects while deciding allocation.  */
184
static struct stack_var *stack_vars;
185
static size_t stack_vars_alloc;
186
static size_t stack_vars_num;
187
static struct pointer_map_t *decl_to_stack_part;
188
 
189
/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
190
   is non-decreasing.  */
191
static size_t *stack_vars_sorted;
192
 
193
/* The phase of the stack frame.  This is the known misalignment of
194
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
195
   (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
196
static int frame_phase;
197
 
198
/* Used during expand_used_vars to remember if we saw any decls for
199
   which we'd like to enable stack smashing protection.  */
200
static bool has_protected_decls;
201
 
202
/* Used during expand_used_vars.  Remember if we saw a character buffer
203
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
204
static bool has_short_buffer;
205
 
206
/* Compute the byte alignment to use for DECL.  Ignore alignment
207
   we can't provide given the expected alignment of the stack boundary.  */
208
 
209
static unsigned int
210
align_local_variable (tree decl)
211
{
212
  unsigned int align = LOCAL_DECL_ALIGNMENT (decl);
213
  DECL_ALIGN (decl) = align;
214
  return align / BITS_PER_UNIT;
215
}
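/* Illustrative example, assuming BITS_PER_UNIT is 8: a decl for which
   LOCAL_DECL_ALIGNMENT yields 32 bits has DECL_ALIGN set to 32, and
   align_local_variable returns 4, the byte alignment that the
   stack-packing code below works with.  */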
216
 
217
/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
218
   Return the frame offset.  */
219
 
220
static HOST_WIDE_INT
221
alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
222
{
223
  HOST_WIDE_INT offset, new_frame_offset;
224
 
225
  new_frame_offset = frame_offset;
226
  if (FRAME_GROWS_DOWNWARD)
227
    {
228
      new_frame_offset -= size + frame_phase;
229
      new_frame_offset &= -align;
230
      new_frame_offset += frame_phase;
231
      offset = new_frame_offset;
232
    }
233
  else
234
    {
235
      new_frame_offset -= frame_phase;
236
      new_frame_offset += align - 1;
237
      new_frame_offset &= -align;
238
      new_frame_offset += frame_phase;
239
      offset = new_frame_offset;
240
      new_frame_offset += size;
241
    }
242
  frame_offset = new_frame_offset;
243
 
244
  if (frame_offset_overflow (frame_offset, cfun->decl))
245
    frame_offset = offset = 0;
246
 
247
  return offset;
248
}
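/* Illustrative example, with assumed values: on a downward-growing frame
   (FRAME_GROWS_DOWNWARD) with frame_phase == 0, frame_offset == 0,
   size == 12 and align == 16, the code above computes
   new_frame_offset = -12, rounds it down via "&= -align" to -16 and
   returns -16; frame_offset is left at -16 for the next allocation.
   With the same inputs on an upward-growing frame, 15 is added before
   the rounding, the function returns 0, and frame_offset advances to 12.  */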
249
 
250
/* Accumulate DECL into STACK_VARS.  */
251
 
252
static void
253
add_stack_var (tree decl)
254
{
255
  struct stack_var *v;
256
 
257
  if (stack_vars_num >= stack_vars_alloc)
258
    {
259
      if (stack_vars_alloc)
260
        stack_vars_alloc = stack_vars_alloc * 3 / 2;
261
      else
262
        stack_vars_alloc = 32;
263
      stack_vars
264
        = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
265
    }
266
  if (!decl_to_stack_part)
267
    decl_to_stack_part = pointer_map_create ();
268
 
269
  v = &stack_vars[stack_vars_num];
270
  * (size_t *)pointer_map_insert (decl_to_stack_part, decl) = stack_vars_num;
271
 
272
  v->decl = decl;
273
  v->size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
274
  /* Ensure that all variables have size, so that &a != &b for any two
275
     variables that are simultaneously live.  */
276
  if (v->size == 0)
277
    v->size = 1;
278
  v->alignb = align_local_variable (SSAVAR (decl));
279
  /* An alignment of zero can mightily confuse us later.  */
280
  gcc_assert (v->alignb != 0);
281
 
282
  /* All variables are initially in their own partition.  */
283
  v->representative = stack_vars_num;
284
  v->next = EOC;
285
 
286
  /* All variables initially conflict with no other.  */
287
  v->conflicts = NULL;
288
 
289
  /* Ensure that this decl doesn't get put onto the list twice.  */
290
  set_rtl (decl, pc_rtx);
291
 
292
  stack_vars_num++;
293
}
294
 
295
/* Make the decls associated with luid's X and Y conflict.  */
296
 
297
static void
298
add_stack_var_conflict (size_t x, size_t y)
299
{
300
  struct stack_var *a = &stack_vars[x];
301
  struct stack_var *b = &stack_vars[y];
302
  if (!a->conflicts)
303
    a->conflicts = BITMAP_ALLOC (NULL);
304
  if (!b->conflicts)
305
    b->conflicts = BITMAP_ALLOC (NULL);
306
  bitmap_set_bit (a->conflicts, y);
307
  bitmap_set_bit (b->conflicts, x);
308
}
309
 
310
/* Check whether the decls associated with luid's X and Y conflict.  */
311
 
312
static bool
313
stack_var_conflict_p (size_t x, size_t y)
314
{
315
  struct stack_var *a = &stack_vars[x];
316
  struct stack_var *b = &stack_vars[y];
317
  if (x == y)
318
    return false;
319
  /* Partitions containing an SSA name result from gimple registers
320
     with things like unsupported modes.  They are top-level and
321
     hence conflict with everything else.  */
322
  if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
323
    return true;
324
 
325
  if (!a->conflicts || !b->conflicts)
326
    return false;
327
  return bitmap_bit_p (a->conflicts, y);
328
}
329
 
330
/* Returns true if TYPE is or contains a union type.  */
331
 
332
static bool
333
aggregate_contains_union_type (tree type)
334
{
335
  tree field;
336
 
337
  if (TREE_CODE (type) == UNION_TYPE
338
      || TREE_CODE (type) == QUAL_UNION_TYPE)
339
    return true;
340
  if (TREE_CODE (type) == ARRAY_TYPE)
341
    return aggregate_contains_union_type (TREE_TYPE (type));
342
  if (TREE_CODE (type) != RECORD_TYPE)
343
    return false;
344
 
345
  for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
346
    if (TREE_CODE (field) == FIELD_DECL)
347
      if (aggregate_contains_union_type (TREE_TYPE (field)))
348
        return true;
349
 
350
  return false;
351
}
352
 
353
/* A subroutine of expand_used_vars.  If two variables X and Y have alias
354
   sets that do not conflict, we explicitly add a conflict for these variables
355
   in the interference graph.  We also need to make sure to add conflicts
356
   for union-containing structures.  Otherwise RTL alias analysis comes along
357
   and due to type based aliasing rules decides that for two overlapping
358
   union temporaries { short s; int i; } accesses to the same mem through
359
   different types may not alias and happily reorders stores across
360
   lifetime boundaries of the temporaries (see PR25654).  */
361
 
362
static void
363
add_alias_set_conflicts (void)
364
{
365
  size_t i, j, n = stack_vars_num;
366
 
367
  for (i = 0; i < n; ++i)
368
    {
369
      tree type_i = TREE_TYPE (stack_vars[i].decl);
370
      bool aggr_i = AGGREGATE_TYPE_P (type_i);
371
      bool contains_union;
372
 
373
      contains_union = aggregate_contains_union_type (type_i);
374
      for (j = 0; j < i; ++j)
375
        {
376
          tree type_j = TREE_TYPE (stack_vars[j].decl);
377
          bool aggr_j = AGGREGATE_TYPE_P (type_j);
378
          if (aggr_i != aggr_j
379
              /* Either the objects conflict by means of type based
380
                 aliasing rules, or we need to add a conflict.  */
381
              || !objects_must_conflict_p (type_i, type_j)
382
              /* In case the types do not conflict ensure that access
383
                 to elements will conflict.  In case of unions we have
384
                 to be careful as type based aliasing rules may say
385
                 access to the same memory does not conflict.  So play
386
                 safe and add a conflict in this case when
387
                 -fstrict-aliasing is used.  */
388
              || (contains_union && flag_strict_aliasing))
389
            add_stack_var_conflict (i, j);
390
        }
391
    }
392
}
393
 
394
/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
395
   enter its partition number into bitmap DATA.  */
396
 
397
static bool
398
visit_op (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
399
{
400
  bitmap active = (bitmap)data;
401
  op = get_base_address (op);
402
  if (op
403
      && DECL_P (op)
404
      && DECL_RTL_IF_SET (op) == pc_rtx)
405
    {
406
      size_t *v = (size_t *) pointer_map_contains (decl_to_stack_part, op);
407
      if (v)
408
        bitmap_set_bit (active, *v);
409
    }
410
  return false;
411
}
412
 
413
/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
414
   record conflicts between it and all currently active other partitions
415
   from bitmap DATA.  */
416
 
417
static bool
418
visit_conflict (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
419
{
420
  bitmap active = (bitmap)data;
421
  op = get_base_address (op);
422
  if (op
423
      && DECL_P (op)
424
      && DECL_RTL_IF_SET (op) == pc_rtx)
425
    {
426
      size_t *v =
427
        (size_t *) pointer_map_contains (decl_to_stack_part, op);
428
      if (v && bitmap_set_bit (active, *v))
429
        {
430
          size_t num = *v;
431
          bitmap_iterator bi;
432
          unsigned i;
433
          gcc_assert (num < stack_vars_num);
434
          EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
435
            add_stack_var_conflict (num, i);
436
        }
437
    }
438
  return false;
439
}
440
 
441
/* Helper routine for add_scope_conflicts, calculating the active partitions
442
   at the end of BB, leaving the result in WORK.  We're called to generate
443
   conflicts when FOR_CONFLICT is true, otherwise we're just tracking
444
   liveness.  */
445
 
446
static void
447
add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
448
{
449
  edge e;
450
  edge_iterator ei;
451
  gimple_stmt_iterator gsi;
452
  bool (*visit)(gimple, tree, void *);
453
 
454
  bitmap_clear (work);
455
  FOR_EACH_EDGE (e, ei, bb->preds)
456
    bitmap_ior_into (work, (bitmap)e->src->aux);
457
 
458
  visit = visit_op;
459
 
460
  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
461
    {
462
      gimple stmt = gsi_stmt (gsi);
463
      walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
464
    }
465
  for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
466
    {
467
      gimple stmt = gsi_stmt (gsi);
468
 
469
      if (gimple_clobber_p (stmt))
470
        {
471
          tree lhs = gimple_assign_lhs (stmt);
472
          size_t *v;
473
          /* Nested function lowering might introduce LHSs
474
             that are COMPONENT_REFs.  */
475
          if (TREE_CODE (lhs) != VAR_DECL)
476
            continue;
477
          if (DECL_RTL_IF_SET (lhs) == pc_rtx
478
              && (v = (size_t *)
479
                  pointer_map_contains (decl_to_stack_part, lhs)))
480
            bitmap_clear_bit (work, *v);
481
        }
482
      else if (!is_gimple_debug (stmt))
483
        {
484
          if (for_conflict
485
              && visit == visit_op)
486
            {
487
              /* If this is the first real instruction in this BB we need
488
                 to add conflicts for everything live at this point now.
489
                 Unlike classical liveness for named objects we can't
490
                 rely on seeing a def/use of the names we're interested in.
491
                 There might merely be indirect loads/stores.  We'd not add any
492
                 conflicts for such partitions.  */
493
              bitmap_iterator bi;
494
              unsigned i;
495
              EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
496
                {
497
                  unsigned j;
498
                  bitmap_iterator bj;
499
                  EXECUTE_IF_SET_IN_BITMAP (work, i + 1, j, bj)
500
                    add_stack_var_conflict (i, j);
501
                }
502
              visit = visit_conflict;
503
            }
504
          walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
505
        }
506
    }
507
}
508
 
509
/* Generate stack partition conflicts between all partitions that are
510
   simultaneously live.  */
511
 
512
static void
513
add_scope_conflicts (void)
514
{
515
  basic_block bb;
516
  bool changed;
517
  bitmap work = BITMAP_ALLOC (NULL);
518
 
519
  /* We approximate the live range of a stack variable by taking the first
520
     mention of its name as starting point(s), and by the end-of-scope
521
     death clobber added by gimplify as ending point(s) of the range.
522
      This overapproximates if, for instance, we moved an address-taken
523
     operation upward, without also moving a dereference to it upwards.
524
      But it's conservatively correct, as a variable can never hold values
525
     before its name is mentioned at least once.
526
 
527
     We then do a mostly classical bitmap liveness algorithm.  */
528
 
529
  FOR_ALL_BB (bb)
530
    bb->aux = BITMAP_ALLOC (NULL);
531
 
532
  changed = true;
533
  while (changed)
534
    {
535
      changed = false;
536
      FOR_EACH_BB (bb)
537
        {
538
          bitmap active = (bitmap)bb->aux;
539
          add_scope_conflicts_1 (bb, work, false);
540
          if (bitmap_ior_into (active, work))
541
            changed = true;
542
        }
543
    }
544
 
545
  FOR_EACH_BB (bb)
546
    add_scope_conflicts_1 (bb, work, true);
547
 
548
  BITMAP_FREE (work);
549
  FOR_ALL_BB (bb)
550
    BITMAP_FREE (bb->aux);
551
}
552
 
553
/* A subroutine of partition_stack_vars.  A comparison function for qsort,
554
   sorting an array of indices by the properties of the object.  */
555
 
556
static int
557
stack_var_cmp (const void *a, const void *b)
558
{
559
  size_t ia = *(const size_t *)a;
560
  size_t ib = *(const size_t *)b;
561
  unsigned int aligna = stack_vars[ia].alignb;
562
  unsigned int alignb = stack_vars[ib].alignb;
563
  HOST_WIDE_INT sizea = stack_vars[ia].size;
564
  HOST_WIDE_INT sizeb = stack_vars[ib].size;
565
  tree decla = stack_vars[ia].decl;
566
  tree declb = stack_vars[ib].decl;
567
  bool largea, largeb;
568
  unsigned int uida, uidb;
569
 
570
  /* Primary compare on "large" alignment.  Large comes first.  */
571
  largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
572
  largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
573
  if (largea != largeb)
574
    return (int)largeb - (int)largea;
575
 
576
   /* Secondary compare on size, decreasing.  */
577
  if (sizea > sizeb)
578
    return -1;
579
  if (sizea < sizeb)
580
    return 1;
581
 
582
  /* Tertiary compare on true alignment, decreasing.  */
583
  if (aligna < alignb)
584
    return -1;
585
  if (aligna > alignb)
586
    return 1;
587
 
588
   /* Final compare on ID for sort stability, decreasing.
589
     Two SSA names are compared by their version, SSA names come before
590
     non-SSA names, and two normal decls are compared by their DECL_UID.  */
591
  if (TREE_CODE (decla) == SSA_NAME)
592
    {
593
      if (TREE_CODE (declb) == SSA_NAME)
594
        uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
595
      else
596
        return -1;
597
    }
598
  else if (TREE_CODE (declb) == SSA_NAME)
599
    return 1;
600
  else
601
    uida = DECL_UID (decla), uidb = DECL_UID (declb);
602
  if (uida < uidb)
603
    return 1;
604
  if (uida > uidb)
605
    return -1;
606
  return 0;
607
}
608
 
609
 
610
/* If the points-to solution *PT points to variables that are in a partition
611
   together with other variables, add all partition members to the pointed-to
612
   variables bitmap.  */
613
 
614
static void
615
add_partitioned_vars_to_ptset (struct pt_solution *pt,
616
                               struct pointer_map_t *decls_to_partitions,
617
                               struct pointer_set_t *visited, bitmap temp)
618
{
619
  bitmap_iterator bi;
620
  unsigned i;
621
  bitmap *part;
622
 
623
  if (pt->anything
624
      || pt->vars == NULL
625
      /* The pointed-to vars bitmap is shared, it is enough to
626
         visit it once.  */
627
      || pointer_set_insert(visited, pt->vars))
628
    return;
629
 
630
  bitmap_clear (temp);
631
 
632
  /* By using a temporary bitmap to store all members of the partitions
633
     we have to add, we make sure to visit each of the partitions only
634
     once.  */
635
  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
636
    if ((!temp
637
         || !bitmap_bit_p (temp, i))
638
        && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
639
                                                    (void *)(size_t) i)))
640
      bitmap_ior_into (temp, *part);
641
  if (!bitmap_empty_p (temp))
642
    bitmap_ior_into (pt->vars, temp);
643
}
644
 
645
/* Update points-to sets based on partition info, so we can use them on RTL.
646
   The bitmaps representing stack partitions will be saved until expand,
647
   where partitioned decls used as bases in memory expressions will be
648
   rewritten.  */
649
 
650
static void
651
update_alias_info_with_stack_vars (void)
652
{
653
  struct pointer_map_t *decls_to_partitions = NULL;
654
  size_t i, j;
655
  tree var = NULL_TREE;
656
 
657
  for (i = 0; i < stack_vars_num; i++)
658
    {
659
      bitmap part = NULL;
660
      tree name;
661
      struct ptr_info_def *pi;
662
 
663
      /* Not interested in partitions with a single variable.  */
664
      if (stack_vars[i].representative != i
665
          || stack_vars[i].next == EOC)
666
        continue;
667
 
668
      if (!decls_to_partitions)
669
        {
670
          decls_to_partitions = pointer_map_create ();
671
          cfun->gimple_df->decls_to_pointers = pointer_map_create ();
672
        }
673
 
674
      /* Create an SSA_NAME that points to the partition for use
675
         as base during alias-oracle queries on RTL for bases that
676
         have been partitioned.  */
677
      if (var == NULL_TREE)
678
        var = create_tmp_var (ptr_type_node, NULL);
679
      name = make_ssa_name (var, NULL);
680
 
681
      /* Create bitmaps representing partitions.  They will be used for
682
         points-to sets later, so use GGC alloc.  */
683
      part = BITMAP_GGC_ALLOC ();
684
      for (j = i; j != EOC; j = stack_vars[j].next)
685
        {
686
          tree decl = stack_vars[j].decl;
687
          unsigned int uid = DECL_PT_UID (decl);
688
          /* We should never end up partitioning SSA names (though they
689
             may end up on the stack).  Neither should we allocate stack
690
             space to something that is unused and thus unreferenced, except
691
             for -O0 where we are preserving even unreferenced variables.  */
692
          gcc_assert (DECL_P (decl)
693
                      && (!optimize
694
                          || referenced_var_lookup (cfun, DECL_UID (decl))));
695
          bitmap_set_bit (part, uid);
696
          *((bitmap *) pointer_map_insert (decls_to_partitions,
697
                                           (void *)(size_t) uid)) = part;
698
          *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
699
                                         decl)) = name;
700
        }
701
 
702
      /* Make the SSA name point to all partition members.  */
703
      pi = get_ptr_info (name);
704
      pt_solution_set (&pi->pt, part, false);
705
    }
706
 
707
  /* Make all points-to sets that contain one member of a partition
708
     contain all members of the partition.  */
709
  if (decls_to_partitions)
710
    {
711
      unsigned i;
712
      struct pointer_set_t *visited = pointer_set_create ();
713
      bitmap temp = BITMAP_ALLOC (NULL);
714
 
715
      for (i = 1; i < num_ssa_names; i++)
716
        {
717
          tree name = ssa_name (i);
718
          struct ptr_info_def *pi;
719
 
720
          if (name
721
              && POINTER_TYPE_P (TREE_TYPE (name))
722
              && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
723
            add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
724
                                           visited, temp);
725
        }
726
 
727
      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
728
                                     decls_to_partitions, visited, temp);
729
 
730
      pointer_set_destroy (visited);
731
      pointer_map_destroy (decls_to_partitions);
732
      BITMAP_FREE (temp);
733
    }
734
}
735
 
736
/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
737
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
738
   Merge them into a single partition A.  */
739
 
740
static void
741
union_stack_vars (size_t a, size_t b)
742
{
743
  struct stack_var *vb = &stack_vars[b];
744
  bitmap_iterator bi;
745
  unsigned u;
746
 
747
  gcc_assert (stack_vars[b].next == EOC);
748
   /* Add B to A's partition.  */
749
  stack_vars[b].next = stack_vars[a].next;
750
  stack_vars[b].representative = a;
751
  stack_vars[a].next = b;
752
 
753
  /* Update the required alignment of partition A to account for B.  */
754
  if (stack_vars[a].alignb < stack_vars[b].alignb)
755
    stack_vars[a].alignb = stack_vars[b].alignb;
756
 
757
  /* Update the interference graph and merge the conflicts.  */
758
  if (vb->conflicts)
759
    {
760
      EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
761
        add_stack_var_conflict (a, stack_vars[u].representative);
762
      BITMAP_FREE (vb->conflicts);
763
    }
764
}
765
 
766
/* A subroutine of expand_used_vars.  Binpack the variables into
767
   partitions constrained by the interference graph.  The overall
768
   algorithm used is as follows:
769
 
770
        Sort the objects by size in descending order.
771
        For each object A {
772
          S = size(A)
773
          O = 0
774
          loop {
775
            Look for the largest non-conflicting object B with size <= S.
776
            UNION (A, B)
777
          }
778
        }
779
*/
780
 
781
static void
782
partition_stack_vars (void)
783
{
784
  size_t si, sj, n = stack_vars_num;
785
 
786
  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
787
  for (si = 0; si < n; ++si)
788
    stack_vars_sorted[si] = si;
789
 
790
  if (n == 1)
791
    return;
792
 
793
  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);
794
 
795
  for (si = 0; si < n; ++si)
796
    {
797
      size_t i = stack_vars_sorted[si];
798
      unsigned int ialign = stack_vars[i].alignb;
799
 
800
      /* Ignore objects that aren't partition representatives. If we
801
         see a var that is not a partition representative, it must
802
         have been merged earlier.  */
803
      if (stack_vars[i].representative != i)
804
        continue;
805
 
806
      for (sj = si + 1; sj < n; ++sj)
807
        {
808
          size_t j = stack_vars_sorted[sj];
809
          unsigned int jalign = stack_vars[j].alignb;
810
 
811
          /* Ignore objects that aren't partition representatives.  */
812
          if (stack_vars[j].representative != j)
813
            continue;
814
 
815
          /* Ignore conflicting objects.  */
816
          if (stack_var_conflict_p (i, j))
817
            continue;
818
 
819
          /* Do not mix objects of "small" (supported) alignment
820
             and "large" (unsupported) alignment.  */
821
          if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
822
              != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
823
            continue;
824
 
825
          /* UNION the objects, placing J at OFFSET.  */
826
          union_stack_vars (i, j);
827
        }
828
    }
829
 
830
  update_alias_info_with_stack_vars ();
831
}
832
 
833
/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */
834
 
835
static void
836
dump_stack_var_partition (void)
837
{
838
  size_t si, i, j, n = stack_vars_num;
839
 
840
  for (si = 0; si < n; ++si)
841
    {
842
      i = stack_vars_sorted[si];
843
 
844
      /* Skip variables that aren't partition representatives, for now.  */
845
      if (stack_vars[i].representative != i)
846
        continue;
847
 
848
      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
849
               " align %u\n", (unsigned long) i, stack_vars[i].size,
850
               stack_vars[i].alignb);
851
 
852
      for (j = i; j != EOC; j = stack_vars[j].next)
853
        {
854
          fputc ('\t', dump_file);
855
          print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
856
        }
857
      fputc ('\n', dump_file);
858
    }
859
}
860
 
861
/* Assign rtl to DECL at BASE + OFFSET.  */
862
 
863
static void
864
expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
865
                         HOST_WIDE_INT offset)
866
{
867
  unsigned align;
868
  rtx x;
869
 
870
  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
871
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));
872
 
873
  x = plus_constant (base, offset);
874
  x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);
875
 
876
  if (TREE_CODE (decl) != SSA_NAME)
877
    {
878
      /* Set alignment we actually gave this decl if it isn't an SSA name.
879
         If it is, we generate stack slots only accidentally, so it isn't as
880
         important, we'll simply use the alignment that is already set.  */
881
      if (base == virtual_stack_vars_rtx)
882
        offset -= frame_phase;
883
      align = offset & -offset;
884
      align *= BITS_PER_UNIT;
885
      if (align == 0 || align > base_align)
886
        align = base_align;
887
 
888
      /* One would think that we could assert that we're not decreasing
889
         alignment here, but (at least) the i386 port does exactly this
890
         via the MINIMUM_ALIGNMENT hook.  */
891
 
892
      DECL_ALIGN (decl) = align;
893
      DECL_USER_ALIGN (decl) = 0;
894
    }
895
 
896
  set_mem_attributes (x, SSAVAR (decl), true);
897
  set_rtl (decl, x);
898
}
899
 
900
/* A subroutine of expand_used_vars.  Give each partition representative
901
   a unique location within the stack frame.  Update each partition member
902
   with that location.  */
903
 
904
static void
905
expand_stack_vars (bool (*pred) (tree))
906
{
907
  size_t si, i, j, n = stack_vars_num;
908
  HOST_WIDE_INT large_size = 0, large_alloc = 0;
909
  rtx large_base = NULL;
910
  unsigned large_align = 0;
911
  tree decl;
912
 
913
  /* Determine if there are any variables requiring "large" alignment.
914
     Since these are dynamically allocated, we only process these if
915
     no predicate is involved.  */
916
  large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
917
  if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
918
    {
919
      /* Find the total size of these variables.  */
920
      for (si = 0; si < n; ++si)
921
        {
922
          unsigned alignb;
923
 
924
          i = stack_vars_sorted[si];
925
          alignb = stack_vars[i].alignb;
926
 
927
          /* Stop when we get to the first decl with "small" alignment.  */
928
          if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
929
            break;
930
 
931
          /* Skip variables that aren't partition representatives.  */
932
          if (stack_vars[i].representative != i)
933
            continue;
934
 
935
          /* Skip variables that have already had rtl assigned.  See also
936
             add_stack_var where we perpetrate this pc_rtx hack.  */
937
          decl = stack_vars[i].decl;
938
          if ((TREE_CODE (decl) == SSA_NAME
939
              ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
940
              : DECL_RTL (decl)) != pc_rtx)
941
            continue;
942
 
943
          large_size += alignb - 1;
944
          large_size &= -(HOST_WIDE_INT)alignb;
945
          large_size += stack_vars[i].size;
946
        }
947
 
948
      /* If there were any, allocate space.  */
949
      if (large_size > 0)
950
        large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
951
                                                   large_align, true);
952
    }
953
 
954
  for (si = 0; si < n; ++si)
955
    {
956
      rtx base;
957
      unsigned base_align, alignb;
958
      HOST_WIDE_INT offset;
959
 
960
      i = stack_vars_sorted[si];
961
 
962
      /* Skip variables that aren't partition representatives, for now.  */
963
      if (stack_vars[i].representative != i)
964
        continue;
965
 
966
      /* Skip variables that have already had rtl assigned.  See also
967
         add_stack_var where we perpetrate this pc_rtx hack.  */
968
      decl = stack_vars[i].decl;
969
      if ((TREE_CODE (decl) == SSA_NAME
970
           ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
971
           : DECL_RTL (decl)) != pc_rtx)
972
        continue;
973
 
974
      /* Check the predicate to see whether this variable should be
975
         allocated in this pass.  */
976
      if (pred && !pred (decl))
977
        continue;
978
 
979
      alignb = stack_vars[i].alignb;
980
      if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
981
        {
982
          offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
983
          base = virtual_stack_vars_rtx;
984
          base_align = crtl->max_used_stack_slot_alignment;
985
        }
986
      else
987
        {
988
          /* Large alignment is only processed in the last pass.  */
989
          if (pred)
990
            continue;
991
          gcc_assert (large_base != NULL);
992
 
993
          large_alloc += alignb - 1;
994
          large_alloc &= -(HOST_WIDE_INT)alignb;
995
          offset = large_alloc;
996
          large_alloc += stack_vars[i].size;
997
 
998
          base = large_base;
999
          base_align = large_align;
1000
        }
1001
 
1002
      /* Create rtl for each variable based on their location within the
1003
         partition.  */
1004
      for (j = i; j != EOC; j = stack_vars[j].next)
1005
        {
1006
          expand_one_stack_var_at (stack_vars[j].decl,
1007
                                   base, base_align,
1008
                                   offset);
1009
        }
1010
    }
1011
 
1012
  gcc_assert (large_alloc == large_size);
1013
}
1014
 
1015
/* Take into account all sizes of partitions and reset DECL_RTLs.  */
1016
static HOST_WIDE_INT
1017
account_stack_vars (void)
1018
{
1019
  size_t si, j, i, n = stack_vars_num;
1020
  HOST_WIDE_INT size = 0;
1021
 
1022
  for (si = 0; si < n; ++si)
1023
    {
1024
      i = stack_vars_sorted[si];
1025
 
1026
      /* Skip variables that aren't partition representatives, for now.  */
1027
      if (stack_vars[i].representative != i)
1028
        continue;
1029
 
1030
      size += stack_vars[i].size;
1031
      for (j = i; j != EOC; j = stack_vars[j].next)
1032
        set_rtl (stack_vars[j].decl, NULL);
1033
    }
1034
  return size;
1035
}
1036
 
1037
/* A subroutine of expand_one_var.  Called to immediately assign rtl
1038
   to a variable to be allocated in the stack frame.  */
1039
 
1040
static void
1041
expand_one_stack_var (tree var)
1042
{
1043
  HOST_WIDE_INT size, offset;
1044
  unsigned byte_align;
1045
 
1046
  size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
1047
  byte_align = align_local_variable (SSAVAR (var));
1048
 
1049
  /* We handle highly aligned variables in expand_stack_vars.  */
1050
  gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);
1051
 
1052
  offset = alloc_stack_frame_space (size, byte_align);
1053
 
1054
  expand_one_stack_var_at (var, virtual_stack_vars_rtx,
1055
                           crtl->max_used_stack_slot_alignment, offset);
1056
}
1057
 
1058
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
1059
   that will reside in a hard register.  */
1060
 
1061
static void
1062
expand_one_hard_reg_var (tree var)
1063
{
1064
  rest_of_decl_compilation (var, 0, 0);
1065
}
1066
 
1067
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
1068
   that will reside in a pseudo register.  */
1069
 
1070
static void
1071
expand_one_register_var (tree var)
1072
{
1073
  tree decl = SSAVAR (var);
1074
  tree type = TREE_TYPE (decl);
1075
  enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
1076
  rtx x = gen_reg_rtx (reg_mode);
1077
 
1078
  set_rtl (var, x);
1079
 
1080
  /* Note if the object is a user variable.  */
1081
  if (!DECL_ARTIFICIAL (decl))
1082
    mark_user_reg (x);
1083
 
1084
  if (POINTER_TYPE_P (type))
1085
    mark_reg_pointer (x, get_pointer_alignment (var));
1086
}
1087
 
1088
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
1089
   has some associated error, e.g. its type is error-mark.  We just need
1090
   to pick something that won't crash the rest of the compiler.  */
1091
 
1092
static void
1093
expand_one_error_var (tree var)
1094
{
1095
  enum machine_mode mode = DECL_MODE (var);
1096
  rtx x;
1097
 
1098
  if (mode == BLKmode)
1099
    x = gen_rtx_MEM (BLKmode, const0_rtx);
1100
  else if (mode == VOIDmode)
1101
    x = const0_rtx;
1102
  else
1103
    x = gen_reg_rtx (mode);
1104
 
1105
  SET_DECL_RTL (var, x);
1106
}
1107
 
1108
/* A subroutine of expand_one_var.  VAR is a variable that will be
1109
   allocated to the local stack frame.  Return true if we wish to
1110
   add VAR to STACK_VARS so that it will be coalesced with other
1111
   variables.  Return false to allocate VAR immediately.
1112
 
1113
   This function is used to reduce the number of variables considered
1114
   for coalescing, which reduces the size of the quadratic problem.  */
1115
 
1116
static bool
1117
defer_stack_allocation (tree var, bool toplevel)
1118
{
1119
  /* If stack protection is enabled, *all* stack variables must be deferred,
1120
     so that we can re-order the strings to the top of the frame.  */
1121
  if (flag_stack_protect)
1122
    return true;
1123
 
1124
  /* We handle "large" alignment via dynamic allocation.  We want to handle
1125
     this extra complication in only one place, so defer them.  */
1126
  if (DECL_ALIGN (var) > MAX_SUPPORTED_STACK_ALIGNMENT)
1127
    return true;
1128
 
1129
  /* Variables in the outermost scope automatically conflict with
1130
     every other variable.  The only reason to want to defer them
1131
     at all is that, after sorting, we can more efficiently pack
1132
     small variables in the stack frame.  Continue to defer at -O2.  */
1133
  if (toplevel && optimize < 2)
1134
    return false;
1135
 
1136
  /* Without optimization, *most* variables are allocated from the
1137
     stack, which makes the quadratic problem large exactly when we
1138
     want compilation to proceed as quickly as possible.  On the
1139
     other hand, we don't want the function's stack frame size to
1140
     get completely out of hand.  So we avoid adding scalars and
1141
     "small" aggregates to the list at all.  */
1142
  if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
1143
    return false;
1144
 
1145
  return true;
1146
}
1147
 
1148
/* A subroutine of expand_used_vars.  Expand one variable according to
1149
   its flavor.  Variables to be placed on the stack are not actually
1150
   expanded yet, merely recorded.
1151
   When REALLY_EXPAND is false, only add stack values to be allocated.
1152
   Return the stack usage this variable is supposed to take.
1153
*/
1154
 
1155
static HOST_WIDE_INT
1156
expand_one_var (tree var, bool toplevel, bool really_expand)
1157
{
1158
  unsigned int align = BITS_PER_UNIT;
1159
  tree origvar = var;
1160
 
1161
  var = SSAVAR (var);
1162
 
1163
  if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
1164
    {
1165
      /* Because we don't know if VAR will be in a register or on the stack,
1166
         we conservatively assume it will be on stack even if VAR is
1167
         eventually put into a register after the RA pass.  For non-automatic
1168
         variables, which won't be on stack, we collect alignment of
1169
         type and ignore user specified alignment.  */
1170
      if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1171
        align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1172
                                   TYPE_MODE (TREE_TYPE (var)),
1173
                                   TYPE_ALIGN (TREE_TYPE (var)));
1174
      else if (DECL_HAS_VALUE_EXPR_P (var)
1175
               || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
1176
        /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
1177
           or variables which were assigned a stack slot already by
1178
           expand_one_stack_var_at - in the latter case DECL_ALIGN has been
1179
           changed to reflect the offset chosen for it.  */
1180
        align = crtl->stack_alignment_estimated;
1181
      else
1182
        align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));
1183
 
1184
      /* If the variable alignment is very large we'll dynamically allocate
1185
         it, which means that in-frame portion is just a pointer.  */
1186
      if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1187
        align = POINTER_SIZE;
1188
    }
1189
 
1190
  if (SUPPORTS_STACK_ALIGNMENT
1191
      && crtl->stack_alignment_estimated < align)
1192
    {
1193
      /* stack_alignment_estimated shouldn't change after stack
1194
         realign decision has been made.  */
1195
      gcc_assert(!crtl->stack_realign_processed);
1196
      crtl->stack_alignment_estimated = align;
1197
    }
1198
 
1199
  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
1200
     So here we only make sure stack_alignment_needed >= align.  */
1201
  if (crtl->stack_alignment_needed < align)
1202
    crtl->stack_alignment_needed = align;
1203
  if (crtl->max_used_stack_slot_alignment < align)
1204
    crtl->max_used_stack_slot_alignment = align;
1205
 
1206
  if (TREE_CODE (origvar) == SSA_NAME)
1207
    {
1208
      gcc_assert (TREE_CODE (var) != VAR_DECL
1209
                  || (!DECL_EXTERNAL (var)
1210
                      && !DECL_HAS_VALUE_EXPR_P (var)
1211
                      && !TREE_STATIC (var)
1212
                      && TREE_TYPE (var) != error_mark_node
1213
                      && !DECL_HARD_REGISTER (var)
1214
                      && really_expand));
1215
    }
1216
  if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
1217
    ;
1218
  else if (DECL_EXTERNAL (var))
1219
    ;
1220
  else if (DECL_HAS_VALUE_EXPR_P (var))
1221
    ;
1222
  else if (TREE_STATIC (var))
1223
    ;
1224
  else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
1225
    ;
1226
  else if (TREE_TYPE (var) == error_mark_node)
1227
    {
1228
      if (really_expand)
1229
        expand_one_error_var (var);
1230
    }
1231
  else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
1232
    {
1233
      if (really_expand)
1234
        expand_one_hard_reg_var (var);
1235
    }
1236
  else if (use_register_for_decl (var))
1237
    {
1238
      if (really_expand)
1239
        expand_one_register_var (origvar);
1240
    }
1241
  else if (!host_integerp (DECL_SIZE_UNIT (var), 1))
1242
    {
1243
      if (really_expand)
1244
        {
1245
          error ("size of variable %q+D is too large", var);
1246
          expand_one_error_var (var);
1247
        }
1248
    }
1249
  else if (defer_stack_allocation (var, toplevel))
1250
    add_stack_var (origvar);
1251
  else
1252
    {
1253
      if (really_expand)
1254
        expand_one_stack_var (origvar);
1255
      return tree_low_cst (DECL_SIZE_UNIT (var), 1);
1256
    }
1257
  return 0;
1258
}
1259
 
1260
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
1261
   expanding variables.  Those variables that can be put into registers
1262
   are allocated pseudos; those that can't are put on the stack.
1263
 
1264
   TOPLEVEL is true if this is the outermost BLOCK.  */
1265
 
1266
static void
1267
expand_used_vars_for_block (tree block, bool toplevel)
1268
{
1269
  tree t;
1270
 
1271
  /* Expand all variables at this level.  */
1272
  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1273
    if (TREE_USED (t)
1274
        && ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
1275
            || !DECL_NONSHAREABLE (t)))
1276
      expand_one_var (t, toplevel, true);
1277
 
1278
  /* Expand all variables at containing levels.  */
1279
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1280
    expand_used_vars_for_block (t, false);
1281
}
1282
 
1283
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
1284
   and clear TREE_USED on all local variables.  */
1285
 
1286
static void
1287
clear_tree_used (tree block)
1288
{
1289
  tree t;
1290
 
1291
  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1292
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
1293
    if ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
1294
        || !DECL_NONSHAREABLE (t))
1295
      TREE_USED (t) = 0;
1296
 
1297
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1298
    clear_tree_used (t);
1299
}
1300
 
1301
/* Examine TYPE and determine a bit mask of the following features.  */
1302
 
1303
#define SPCT_HAS_LARGE_CHAR_ARRAY       1
1304
#define SPCT_HAS_SMALL_CHAR_ARRAY       2
1305
#define SPCT_HAS_ARRAY                  4
1306
#define SPCT_HAS_AGGREGATE              8
1307
 
1308
static unsigned int
1309
stack_protect_classify_type (tree type)
1310
{
1311
  unsigned int ret = 0;
1312
  tree t;
1313
 
1314
  switch (TREE_CODE (type))
1315
    {
1316
    case ARRAY_TYPE:
1317
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1318
      if (t == char_type_node
1319
          || t == signed_char_type_node
1320
          || t == unsigned_char_type_node)
1321
        {
1322
          unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
1323
          unsigned HOST_WIDE_INT len;
1324
 
1325
          if (!TYPE_SIZE_UNIT (type)
1326
              || !host_integerp (TYPE_SIZE_UNIT (type), 1))
1327
            len = max;
1328
          else
1329
            len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
1330
 
1331
          if (len < max)
1332
            ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
1333
          else
1334
            ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
1335
        }
1336
      else
1337
        ret = SPCT_HAS_ARRAY;
1338
      break;
1339
 
1340
    case UNION_TYPE:
1341
    case QUAL_UNION_TYPE:
1342
    case RECORD_TYPE:
1343
      ret = SPCT_HAS_AGGREGATE;
1344
      for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
1345
        if (TREE_CODE (t) == FIELD_DECL)
1346
          ret |= stack_protect_classify_type (TREE_TYPE (t));
1347
      break;
1348
 
1349
    default:
1350
      break;
1351
    }
1352
 
1353
  return ret;
1354
}
1355
 
1356
/* Return nonzero if DECL should be segregated into the "vulnerable" upper
1357
   part of the local stack frame.  Remember if we ever return nonzero for
1358
   any variable in this function.  The return value is the phase number in
1359
   which the variable should be allocated.  */
1360
 
1361
static int
1362
stack_protect_decl_phase (tree decl)
1363
{
1364
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
1365
  int ret = 0;
1366
 
1367
  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
1368
    has_short_buffer = true;
1369
 
1370
  if (flag_stack_protect == 2)
1371
    {
1372
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
1373
          && !(bits & SPCT_HAS_AGGREGATE))
1374
        ret = 1;
1375
      else if (bits & SPCT_HAS_ARRAY)
1376
        ret = 2;
1377
    }
1378
  else
1379
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
1380
 
1381
  if (ret)
1382
    has_protected_decls = true;
1383
 
1384
  return ret;
1385
}
1386
 
1387
/* Two helper routines that check for phase 1 and phase 2.  These are used
1388
   as callbacks for expand_stack_vars.  */
1389
 
1390
static bool
1391
stack_protect_decl_phase_1 (tree decl)
1392
{
1393
  return stack_protect_decl_phase (decl) == 1;
1394
}
1395
 
1396
static bool
1397
stack_protect_decl_phase_2 (tree decl)
1398
{
1399
  return stack_protect_decl_phase (decl) == 2;
1400
}
1401
 
1402
/* Ensure that variables in different stack protection phases conflict
1403
   so that they are not merged and share the same stack slot.  */
1404
 
1405
static void
1406
add_stack_protection_conflicts (void)
1407
{
1408
  size_t i, j, n = stack_vars_num;
1409
  unsigned char *phase;
1410
 
1411
  phase = XNEWVEC (unsigned char, n);
1412
  for (i = 0; i < n; ++i)
1413
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
1414
 
1415
  for (i = 0; i < n; ++i)
1416
    {
1417
      unsigned char ph_i = phase[i];
1418
      for (j = 0; j < i; ++j)
1419
        if (ph_i != phase[j])
1420
          add_stack_var_conflict (i, j);
1421
    }
1422
 
1423
  XDELETEVEC (phase);
1424
}
1425
 
1426
/* Create a decl for the guard at the top of the stack frame.  */
1427
 
1428
static void
1429
create_stack_guard (void)
1430
{
1431
  tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1432
                           VAR_DECL, NULL, ptr_type_node);
1433
  TREE_THIS_VOLATILE (guard) = 1;
1434
  TREE_USED (guard) = 1;
1435
  expand_one_stack_var (guard);
1436
  crtl->stack_protect_guard = guard;
1437
}
1438
 
1439
/* Prepare for expanding variables.  */
1440
static void
1441
init_vars_expansion (void)
1442
{
1443
  tree t;
1444
  unsigned ix;
1445
  /* Set TREE_USED on all variables in the local_decls.  */
1446
  FOR_EACH_LOCAL_DECL (cfun, ix, t)
1447
    TREE_USED (t) = 1;
1448
 
1449
  /* Clear TREE_USED on all variables associated with a block scope.  */
1450
  clear_tree_used (DECL_INITIAL (current_function_decl));
1451
 
1452
  /* Initialize local stack smashing state.  */
1453
  has_protected_decls = false;
1454
  has_short_buffer = false;
1455
}
1456
 
1457
/* Free up stack variable graph data.  */
1458
static void
1459
fini_vars_expansion (void)
1460
{
1461
  size_t i, n = stack_vars_num;
1462
  for (i = 0; i < n; i++)
1463
    BITMAP_FREE (stack_vars[i].conflicts);
1464
  XDELETEVEC (stack_vars);
1465
  XDELETEVEC (stack_vars_sorted);
1466
  stack_vars = NULL;
1467
  stack_vars_alloc = stack_vars_num = 0;
1468
  pointer_map_destroy (decl_to_stack_part);
1469
  decl_to_stack_part = NULL;
1470
}
1471
 
1472
/* Make a fair guess for the size of the stack frame of the function
1473
   in NODE.  This doesn't have to be exact; the result is only used in
1474
   the inline heuristics.  So we don't want to run the full stack var
1475
   packing algorithm (which is quadratic in the number of stack vars).
1476
   Instead, we calculate the total size of all stack vars.  This turns
1477
   out to be a pretty fair estimate -- packing of stack vars doesn't
1478
   happen very often.  */
1479
 
1480
HOST_WIDE_INT
1481
estimated_stack_frame_size (struct cgraph_node *node)
1482
{
1483
  HOST_WIDE_INT size = 0;
1484
  size_t i;
1485
  tree var;
1486
  tree old_cur_fun_decl = current_function_decl;
1487
  referenced_var_iterator rvi;
1488
  struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
1489
 
1490
  current_function_decl = node->decl;
1491
  push_cfun (fn);
1492
 
1493
  gcc_checking_assert (gimple_referenced_vars (fn));
1494
  FOR_EACH_REFERENCED_VAR (fn, var, rvi)
1495
    size += expand_one_var (var, true, false);
1496
 
1497
  if (stack_vars_num > 0)
1498
    {
1499
      /* Fake sorting the stack vars for account_stack_vars ().  */
1500
      stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
1501
      for (i = 0; i < stack_vars_num; ++i)
1502
        stack_vars_sorted[i] = i;
1503
      size += account_stack_vars ();
1504
      fini_vars_expansion ();
1505
    }
1506
  pop_cfun ();
1507
  current_function_decl = old_cur_fun_decl;
1508
  return size;
1509
}
1510
 
1511
/* Expand all variables used in the function.  */
1512
 
1513
static void
1514
expand_used_vars (void)
1515
{
1516
  tree var, outer_block = DECL_INITIAL (current_function_decl);
1517
  VEC(tree,heap) *maybe_local_decls = NULL;
1518
  unsigned i;
1519
  unsigned len;
1520
 
1521
  /* Compute the phase of the stack frame for this function.  */
1522
  {
1523
    int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1524
    int off = STARTING_FRAME_OFFSET % align;
1525
    frame_phase = off ? align - off : 0;
1526
  }
1527
 
1528
  init_vars_expansion ();
1529
 
1530
  for (i = 0; i < SA.map->num_partitions; i++)
1531
    {
1532
      tree var = partition_to_var (SA.map, i);
1533
 
1534
      gcc_assert (is_gimple_reg (var));
1535
      if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
1536
        expand_one_var (var, true, true);
1537
      else
1538
        {
1539
          /* This is a PARM_DECL or RESULT_DECL.  For those partitions that
1540
             contain the default def (representing the parm or result itself)
1541
             we don't do anything here.  But those which don't contain the
1542
             default def (representing a temporary based on the parm/result)
1543
             we need to allocate space just like for normal VAR_DECLs.  */
1544
          if (!bitmap_bit_p (SA.partition_has_default_def, i))
1545
            {
1546
              expand_one_var (var, true, true);
1547
              gcc_assert (SA.partition_to_pseudo[i]);
1548
            }
1549
        }
1550
    }
1551
 
1552
  /* At this point all variables on the local_decls with TREE_USED
1553
     set are not associated with any block scope.  Lay them out.  */
1554
 
1555
  len = VEC_length (tree, cfun->local_decls);
1556
  FOR_EACH_LOCAL_DECL (cfun, i, var)
1557
    {
1558
      bool expand_now = false;
1559
 
1560
      /* Expanded above already.  */
1561
      if (is_gimple_reg (var))
1562
        {
1563
          TREE_USED (var) = 0;
1564
          goto next;
1565
        }
1566
      /* We didn't set a block for static or extern because it's hard
1567
         to tell the difference between a global variable (re)declared
1568
         in a local scope, and one that's really declared there to
1569
         begin with.  And it doesn't really matter much, since we're
1570
         not giving them stack space.  Expand them now.  */
1571
      else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1572
        expand_now = true;
1573
 
1574
      /* If the variable is not associated with any block, then it
1575
         was created by the optimizers, and could be live anywhere
1576
         in the function.  */
1577
      else if (TREE_USED (var))
1578
        expand_now = true;
1579
 
1580
      /* Finally, mark all variables on the list as used.  We'll use
1581
         this in a moment when we expand those associated with scopes.  */
1582
      TREE_USED (var) = 1;
1583
 
1584
      if (expand_now)
1585
        expand_one_var (var, true, true);
1586
 
1587
    next:
1588
      if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
1589
        {
1590
          rtx rtl = DECL_RTL_IF_SET (var);
1591
 
1592
          /* Keep artificial non-ignored vars in cfun->local_decls
1593
             chain until instantiate_decls.  */
1594
          if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1595
            add_local_decl (cfun, var);
1596
          else if (rtl == NULL_RTX)
1597
            /* If rtl isn't set yet, which can happen e.g. with
1598
               -fstack-protector, retry before returning from this
1599
               function.  */
1600
            VEC_safe_push (tree, heap, maybe_local_decls, var);
1601
        }
1602
    }
1603
 
1604
  /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
1605
 
1606
     +-----------------+-----------------+
1607
     | ...processed... | ...duplicates...|
1608
     +-----------------+-----------------+
1609
                       ^
1610
                       +-- LEN points here.
1611
 
1612
     We just want the duplicates, as those are the artificial
1613
     non-ignored vars that we want to keep until instantiate_decls.
1614
     Move them down and truncate the array.  */
1615
  if (!VEC_empty (tree, cfun->local_decls))
1616
    VEC_block_remove (tree, cfun->local_decls, 0, len);
1617
 
1618
  /* At this point, all variables within the block tree with TREE_USED
1619
     set are actually used by the optimized function.  Lay them out.  */
1620
  expand_used_vars_for_block (outer_block, true);
1621
 
1622
  if (stack_vars_num > 0)
1623
    {
1624
      add_scope_conflicts ();
1625
      /* Due to the way alias sets work, no variables with non-conflicting
1626
         alias sets may be assigned the same address.  Add conflicts to
1627
         reflect this.  */
1628
      add_alias_set_conflicts ();
1629
 
1630
      /* If stack protection is enabled, we don't share space between
1631
         vulnerable data and non-vulnerable data.  */
1632
      if (flag_stack_protect)
1633
        add_stack_protection_conflicts ();
1634
 
1635
      /* Now that we have collected all stack variables, and have computed a
1636
         minimal interference graph, attempt to save some stack space.  */
1637
      partition_stack_vars ();
1638
      if (dump_file)
1639
        dump_stack_var_partition ();
1640
    }
1641
 
1642
  /* There are several conditions under which we should create a
1643
     stack guard: protect-all, alloca used, protected decls present.  */
1644
  if (flag_stack_protect == 2
1645
      || (flag_stack_protect
1646
          && (cfun->calls_alloca || has_protected_decls)))
1647
    create_stack_guard ();
1648
 
1649
  /* Assign rtl to each variable based on these partitions.  */
1650
  if (stack_vars_num > 0)
1651
    {
1652
      /* Reorder decls to be protected by iterating over the variables
1653
         array multiple times, and allocating out of each phase in turn.  */
1654
      /* ??? We could probably integrate this into the qsort we did
1655
         earlier, such that we naturally see these variables first,
1656
         and thus naturally allocate things in the right order.  */
1657
      if (has_protected_decls)
1658
        {
1659
          /* Phase 1 contains only character arrays.  */
1660
          expand_stack_vars (stack_protect_decl_phase_1);
1661
 
1662
          /* Phase 2 contains other kinds of arrays.  */
1663
          if (flag_stack_protect == 2)
1664
            expand_stack_vars (stack_protect_decl_phase_2);
1665
        }
1666
 
1667
      expand_stack_vars (NULL);
1668
 
1669
      fini_vars_expansion ();
1670
    }
1671
 
1672
  /* If there were any artificial non-ignored vars without rtl
1673
     found earlier, see if deferred stack allocation hasn't assigned
1674
     rtl to them.  */
1675
  FOR_EACH_VEC_ELT_REVERSE (tree, maybe_local_decls, i, var)
1676
    {
1677
      rtx rtl = DECL_RTL_IF_SET (var);
1678
 
1679
      /* Keep artificial non-ignored vars in cfun->local_decls
1680
         chain until instantiate_decls.  */
1681
      if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1682
        add_local_decl (cfun, var);
1683
    }
1684
  VEC_free (tree, heap, maybe_local_decls);
1685
 
1686
  /* If the target requires that FRAME_OFFSET be aligned, do it.  */
1687
  if (STACK_ALIGNMENT_NEEDED)
1688
    {
1689
      HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1690
      if (!FRAME_GROWS_DOWNWARD)
1691
        frame_offset += align - 1;
1692
      frame_offset &= -align;
1693
    }
1694
}
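
/* Editor's note: the block below is an illustrative sketch, not part of
   the original cfgexpand.c.  It restates, in plain ISO C, the
   round-to-alignment arithmetic applied to FRAME_OFFSET at the end of
   expand_used_vars above, assuming ALIGN is a power of two.  It is
   fenced off with #if 0 so it is never compiled.  */
#if 0
#include <assert.h>

/* Round a non-negative offset up to the next multiple of ALIGN, as done
   for an upward-growing frame (add ALIGN - 1, then mask).  */
static long
example_round_up (long offset, long align)
{
  return (offset + align - 1) & -align;
}

/* For a downward-growing frame the offset is negative; masking with
   -ALIGN alone rounds it toward minus infinity, which only makes the
   frame larger.  */
static long
example_round_down (long offset, long align)
{
  return offset & -align;
}

static void
example_frame_align (void)
{
  assert (example_round_up (37, 16) == 48);
  assert (example_round_up (48, 16) == 48);
  assert (example_round_down (-37, 16) == -48);
}
#endif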
1695
 
1696
 
1697
/* If we need to produce a detailed dump, print the tree representation
1698
   for STMT to the dump file.  SINCE is the last RTX after which the RTL
1699
   generated for STMT should have been appended.  */
1700
 
1701
static void
1702
maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
1703
{
1704
  if (dump_file && (dump_flags & TDF_DETAILS))
1705
    {
1706
      fprintf (dump_file, "\n;; ");
1707
      print_gimple_stmt (dump_file, stmt, 0,
1708
                         TDF_SLIM | (dump_flags & TDF_LINENO));
1709
      fprintf (dump_file, "\n");
1710
 
1711
      print_rtl (dump_file, since ? NEXT_INSN (since) : since);
1712
    }
1713
}
1714
 
1715
/* Maps the blocks that do not contain tree labels to rtx labels.  */
1716
 
1717
static struct pointer_map_t *lab_rtx_for_bb;
1718
 
1719
/* Returns the label_rtx expression for a label starting basic block BB.  */
1720
 
1721
static rtx
1722
label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
1723
{
1724
  gimple_stmt_iterator gsi;
1725
  tree lab;
1726
  gimple lab_stmt;
1727
  void **elt;
1728
 
1729
  if (bb->flags & BB_RTL)
1730
    return block_label (bb);
1731
 
1732
  elt = pointer_map_contains (lab_rtx_for_bb, bb);
1733
  if (elt)
1734
    return (rtx) *elt;
1735
 
1736
  /* Find the tree label if it is present.  */
1737
 
1738
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1739
    {
1740
      lab_stmt = gsi_stmt (gsi);
1741
      if (gimple_code (lab_stmt) != GIMPLE_LABEL)
1742
        break;
1743
 
1744
      lab = gimple_label_label (lab_stmt);
1745
      if (DECL_NONLOCAL (lab))
1746
        break;
1747
 
1748
      return label_rtx (lab);
1749
    }
1750
 
1751
  elt = pointer_map_insert (lab_rtx_for_bb, bb);
1752
  *elt = gen_label_rtx ();
1753
  return (rtx) *elt;
1754
}
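
/* Editor's note: an illustrative sketch, not part of the original file,
   of the look-up-or-create pattern used by label_rtx_for_bb above: a
   block without a user label still gets exactly one RTL label, created
   lazily and memoized so that every edge to the block reuses it.  The
   tiny fixed-size map below is a hypothetical stand-in for
   pointer_map_t; it is fenced off with #if 0.  */
#if 0
#include <stddef.h>

#define EXAMPLE_MAX_BLOCKS 64

struct example_label_map
{
  const void *key[EXAMPLE_MAX_BLOCKS];
  int label[EXAMPLE_MAX_BLOCKS];
  size_t n;
  int next_label;
};

/* Return the label already recorded for BLOCK, or create and record a
   fresh one on the first request.  */
static int
example_label_for_block (struct example_label_map *map, const void *block)
{
  size_t i;
  for (i = 0; i < map->n; i++)
    if (map->key[i] == block)
      return map->label[i];
  map->key[map->n] = block;
  map->label[map->n] = map->next_label++;
  return map->label[map->n++];
}
#endif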
1755
 
1756
 
1757
/* A subroutine of expand_gimple_cond.  Given E, a fallthrough edge
1758
   of a basic block where we just expanded the conditional at the end,
1759
   possibly clean up the CFG and instruction sequence.  LAST is the
1760
   last instruction before the just emitted jump sequence.  */
1761
 
1762
static void
1763
maybe_cleanup_end_of_block (edge e, rtx last)
1764
{
1765
  /* Special case: when jumpif decides that the condition is
1766
     trivial, it emits an unconditional jump (and the necessary
1767
     barrier).  But we still have two edges, and the fallthru one is
1768
     wrong.  purge_dead_edges would clean this up later.  Unfortunately
1769
     we have to insert insns (and split edges) before
1770
     find_many_sub_basic_blocks and hence before purge_dead_edges.
1771
     But splitting edges might create new blocks which depend on the
1772
     fact that if there are two edges there's no barrier.  So the
1773
     barrier would get lost and verify_flow_info would ICE.  Instead
1774
     of auditing all edge splitters to care for the barrier (which
1775
     normally isn't there in a cleaned CFG), fix it here.  */
1776
  if (BARRIER_P (get_last_insn ()))
1777
    {
1778
      rtx insn;
1779
      remove_edge (e);
1780
      /* Now we have a single successor block; if we have insns to
1781
         insert on the remaining edge, we will potentially insert
1782
         them at the end of this block (if the dest block isn't feasible)
1783
         in order to avoid splitting the edge.  This insertion will take
1784
         place in front of the last jump.  But we might have emitted
1785
         multiple jumps (conditional and one unconditional) to the
1786
         same destination.  Inserting in front of the last one then
1787
         is a problem.  See PR 40021.  We fix this by deleting all
1788
         jumps except the last unconditional one.  */
1789
      insn = PREV_INSN (get_last_insn ());
1790
      /* Make sure we have an unconditional jump.  Otherwise we're
1791
         confused.  */
1792
      gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
1793
      for (insn = PREV_INSN (insn); insn != last;)
1794
        {
1795
          insn = PREV_INSN (insn);
1796
          if (JUMP_P (NEXT_INSN (insn)))
1797
            {
1798
              if (!any_condjump_p (NEXT_INSN (insn)))
1799
                {
1800
                  gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
1801
                  delete_insn (NEXT_INSN (NEXT_INSN (insn)));
1802
                }
1803
              delete_insn (NEXT_INSN (insn));
1804
            }
1805
        }
1806
    }
1807
}
1808
 
1809
/* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_COND.
1810
   Returns a new basic block if we've terminated the current basic
1811
   block and created a new one.  */
1812
 
1813
static basic_block
1814
expand_gimple_cond (basic_block bb, gimple stmt)
1815
{
1816
  basic_block new_bb, dest;
1817
  edge new_edge;
1818
  edge true_edge;
1819
  edge false_edge;
1820
  rtx last2, last;
1821
  enum tree_code code;
1822
  tree op0, op1;
1823
 
1824
  code = gimple_cond_code (stmt);
1825
  op0 = gimple_cond_lhs (stmt);
1826
  op1 = gimple_cond_rhs (stmt);
1827
  /* We're sometimes presented with such code:
1828
       D.123_1 = x < y;
1829
       if (D.123_1 != 0)
1830
         ...
1831
     This would expand to two comparisons which then later might
1832
     be cleaned up by combine.  But some pattern matchers like if-conversion
1833
     work better when there's only one compare, so make up for this
1834
     here as a special exception if TER would have made the same change.  */
1835
  if (gimple_cond_single_var_p (stmt)
1836
      && SA.values
1837
      && TREE_CODE (op0) == SSA_NAME
1838
      && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
1839
    {
1840
      gimple second = SSA_NAME_DEF_STMT (op0);
1841
      if (gimple_code (second) == GIMPLE_ASSIGN)
1842
        {
1843
          enum tree_code code2 = gimple_assign_rhs_code (second);
1844
          if (TREE_CODE_CLASS (code2) == tcc_comparison)
1845
            {
1846
              code = code2;
1847
              op0 = gimple_assign_rhs1 (second);
1848
              op1 = gimple_assign_rhs2 (second);
1849
            }
1850
          /* If jumps are cheap, turn some more codes into
1851
             jumpy sequences.  */
1852
          else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
1853
            {
1854
              if ((code2 == BIT_AND_EXPR
1855
                   && TYPE_PRECISION (TREE_TYPE (op0)) == 1
1856
                   && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
1857
                  || code2 == TRUTH_AND_EXPR)
1858
                {
1859
                  code = TRUTH_ANDIF_EXPR;
1860
                  op0 = gimple_assign_rhs1 (second);
1861
                  op1 = gimple_assign_rhs2 (second);
1862
                }
1863
              else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
1864
                {
1865
                  code = TRUTH_ORIF_EXPR;
1866
                  op0 = gimple_assign_rhs1 (second);
1867
                  op1 = gimple_assign_rhs2 (second);
1868
                }
1869
            }
1870
        }
1871
    }
1872
 
1873
  last2 = last = get_last_insn ();
1874
 
1875
  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
1876
  set_curr_insn_source_location (gimple_location (stmt));
1877
  set_curr_insn_block (gimple_block (stmt));
1878
 
1879
  /* These flags have no purpose in RTL land.  */
1880
  true_edge->flags &= ~EDGE_TRUE_VALUE;
1881
  false_edge->flags &= ~EDGE_FALSE_VALUE;
1882
 
1883
  /* We can either have a pure conditional jump with one fallthru edge or
1884
     a two-way jump that needs to be decomposed into two basic blocks.  */
1885
  if (false_edge->dest == bb->next_bb)
1886
    {
1887
      jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
1888
                true_edge->probability);
1889
      maybe_dump_rtl_for_gimple_stmt (stmt, last);
1890
      if (true_edge->goto_locus)
1891
        {
1892
          set_curr_insn_source_location (true_edge->goto_locus);
1893
          set_curr_insn_block (true_edge->goto_block);
1894
          true_edge->goto_locus = curr_insn_locator ();
1895
        }
1896
      true_edge->goto_block = NULL;
1897
      false_edge->flags |= EDGE_FALLTHRU;
1898
      maybe_cleanup_end_of_block (false_edge, last);
1899
      return NULL;
1900
    }
1901
  if (true_edge->dest == bb->next_bb)
1902
    {
1903
      jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
1904
                   false_edge->probability);
1905
      maybe_dump_rtl_for_gimple_stmt (stmt, last);
1906
      if (false_edge->goto_locus)
1907
        {
1908
          set_curr_insn_source_location (false_edge->goto_locus);
1909
          set_curr_insn_block (false_edge->goto_block);
1910
          false_edge->goto_locus = curr_insn_locator ();
1911
        }
1912
      false_edge->goto_block = NULL;
1913
      true_edge->flags |= EDGE_FALLTHRU;
1914
      maybe_cleanup_end_of_block (true_edge, last);
1915
      return NULL;
1916
    }
1917
 
1918
  jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
1919
            true_edge->probability);
1920
  last = get_last_insn ();
1921
  if (false_edge->goto_locus)
1922
    {
1923
      set_curr_insn_source_location (false_edge->goto_locus);
1924
      set_curr_insn_block (false_edge->goto_block);
1925
      false_edge->goto_locus = curr_insn_locator ();
1926
    }
1927
  false_edge->goto_block = NULL;
1928
  emit_jump (label_rtx_for_bb (false_edge->dest));
1929
 
1930
  BB_END (bb) = last;
1931
  if (BARRIER_P (BB_END (bb)))
1932
    BB_END (bb) = PREV_INSN (BB_END (bb));
1933
  update_bb_for_insn (bb);
1934
 
1935
  new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
1936
  dest = false_edge->dest;
1937
  redirect_edge_succ (false_edge, new_bb);
1938
  false_edge->flags |= EDGE_FALLTHRU;
1939
  new_bb->count = false_edge->count;
1940
  new_bb->frequency = EDGE_FREQUENCY (false_edge);
1941
  new_edge = make_edge (new_bb, dest, 0);
1942
  new_edge->probability = REG_BR_PROB_BASE;
1943
  new_edge->count = new_bb->count;
1944
  if (BARRIER_P (BB_END (new_bb)))
1945
    BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
1946
  update_bb_for_insn (new_bb);
1947
 
1948
  maybe_dump_rtl_for_gimple_stmt (stmt, last2);
1949
 
1950
  if (true_edge->goto_locus)
1951
    {
1952
      set_curr_insn_source_location (true_edge->goto_locus);
1953
      set_curr_insn_block (true_edge->goto_block);
1954
      true_edge->goto_locus = curr_insn_locator ();
1955
    }
1956
  true_edge->goto_block = NULL;
1957
 
1958
  return new_bb;
1959
}
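
/* Editor's note: an illustrative, source-level sketch, not part of the
   original file, of the two condition rewrites performed near the top
   of expand_gimple_cond above.  The function and variable names are
   hypothetical; the block is fenced off with #if 0.  */
#if 0
extern void example_sink (void);

/* The comparison feeding the branch arrives as a separate statement,
   roughly  _1 = a < b;  if (_1 != 0) ...  Without the special case
   above this would expand to two compares; looking through the SSA
   definition (when TER would have done the same) emits just one.  */
static void
example_single_compare (int a, int b)
{
  int t = a < b;
  if (t)
    example_sink ();
}

/* When branches are cheap (BRANCH_COST < 4), a bitwise AND/OR of two
   1-bit values is expanded as a short-circuit jump sequence, i.e. the
   '&' form below is treated like '&&'.  */
static void
example_cheap_branches (int a, int b, int c, int d)
{
  if ((a < b) & (c < d))
    example_sink ();
}
#endif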
1960
 
1961
/* Mark all calls that can have a transaction restart.  */
1962
 
1963
static void
1964
mark_transaction_restart_calls (gimple stmt)
1965
{
1966
  struct tm_restart_node dummy;
1967
  void **slot;
1968
 
1969
  if (!cfun->gimple_df->tm_restart)
1970
    return;
1971
 
1972
  dummy.stmt = stmt;
1973
  slot = htab_find_slot (cfun->gimple_df->tm_restart, &dummy, NO_INSERT);
1974
  if (slot)
1975
    {
1976
      struct tm_restart_node *n = (struct tm_restart_node *) *slot;
1977
      tree list = n->label_or_list;
1978
      rtx insn;
1979
 
1980
      for (insn = next_real_insn (get_last_insn ());
1981
           !CALL_P (insn);
1982
           insn = next_real_insn (insn))
1983
        continue;
1984
 
1985
      if (TREE_CODE (list) == LABEL_DECL)
1986
        add_reg_note (insn, REG_TM, label_rtx (list));
1987
      else
1988
        for (; list ; list = TREE_CHAIN (list))
1989
          add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
1990
    }
1991
}
1992
 
1993
/* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
1994
   statement STMT.  */
1995
 
1996
static void
1997
expand_call_stmt (gimple stmt)
1998
{
1999
  tree exp, decl, lhs;
2000
  bool builtin_p;
2001
  size_t i;
2002
 
2003
  if (gimple_call_internal_p (stmt))
2004
    {
2005
      expand_internal_call (stmt);
2006
      return;
2007
    }
2008
 
2009
  exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
2010
 
2011
  CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
2012
  decl = gimple_call_fndecl (stmt);
2013
  builtin_p = decl && DECL_BUILT_IN (decl);
2014
 
2015
  /* If this is not a builtin function, the function type through which the
2016
     call is made may be different from the type of the function.  */
2017
  if (!builtin_p)
2018
    CALL_EXPR_FN (exp)
2019
      = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
2020
                      CALL_EXPR_FN (exp));
2021
 
2022
  TREE_TYPE (exp) = gimple_call_return_type (stmt);
2023
  CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
2024
 
2025
  for (i = 0; i < gimple_call_num_args (stmt); i++)
2026
    {
2027
      tree arg = gimple_call_arg (stmt, i);
2028
      gimple def;
2029
      /* TER forwards addresses into arguments of builtin functions so we
2030
         have a chance to infer more correct alignment information.  See
         PR39954.  */
2031
      if (builtin_p
2032
          && TREE_CODE (arg) == SSA_NAME
2033
          && (def = get_gimple_for_ssa_name (arg))
2034
          && gimple_assign_rhs_code (def) == ADDR_EXPR)
2035
        arg = gimple_assign_rhs1 (def);
2036
      CALL_EXPR_ARG (exp, i) = arg;
2037
    }
2038
 
2039
  if (gimple_has_side_effects (stmt))
2040
    TREE_SIDE_EFFECTS (exp) = 1;
2041
 
2042
  if (gimple_call_nothrow_p (stmt))
2043
    TREE_NOTHROW (exp) = 1;
2044
 
2045
  CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
2046
  CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
2047
  if (decl
2048
      && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
2049
      && (DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA
2050
          || DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA_WITH_ALIGN))
2051
    CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2052
  else
2053
    CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
2054
  CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
2055
  SET_EXPR_LOCATION (exp, gimple_location (stmt));
2056
  TREE_BLOCK (exp) = gimple_block (stmt);
2057
 
2058
  /* Ensure RTL is created for debug args.  */
2059
  if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2060
    {
2061
      VEC(tree, gc) **debug_args = decl_debug_args_lookup (decl);
2062
      unsigned int ix;
2063
      tree dtemp;
2064
 
2065
      if (debug_args)
2066
        for (ix = 1; VEC_iterate (tree, *debug_args, ix, dtemp); ix += 2)
2067
          {
2068
            gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2069
            expand_debug_expr (dtemp);
2070
          }
2071
    }
2072
 
2073
  lhs = gimple_call_lhs (stmt);
2074
  if (lhs)
2075
    expand_assignment (lhs, exp, false);
2076
  else
2077
    expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL);
2078
 
2079
  mark_transaction_restart_calls (stmt);
2080
}
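
/* Editor's note: an illustrative sketch, not part of the original file,
   of the situation behind the PR39954 comment in expand_call_stmt
   above.  The type and function names are hypothetical; the block is
   fenced off with #if 0.  */
#if 0
struct example_pair
{
  double a;
  double b;
} __attribute__ ((aligned (16)));

static void
example_builtin_alignment (struct example_pair *dst,
                           const struct example_pair *src)
{
  /* In GIMPLE the argument addresses become SSA temporaries
     (_1 = &dst->a; _2 = &src->a).  Substituting the underlying
     ADDR_EXPRs back into the builtin call gives the memcpy expander a
     chance to see the 16-byte alignment of both operands.  */
  __builtin_memcpy (&dst->a, &src->a, 2 * sizeof (double));
}
#endif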
2081
 
2082
/* A subroutine of expand_gimple_stmt, expanding one gimple statement
2083
   STMT that doesn't require special handling for outgoing edges.  That
2084
   is no tailcalls and no GIMPLE_COND.  */
2085
 
2086
static void
2087
expand_gimple_stmt_1 (gimple stmt)
2088
{
2089
  tree op0;
2090
 
2091
  set_curr_insn_source_location (gimple_location (stmt));
2092
  set_curr_insn_block (gimple_block (stmt));
2093
 
2094
  switch (gimple_code (stmt))
2095
    {
2096
    case GIMPLE_GOTO:
2097
      op0 = gimple_goto_dest (stmt);
2098
      if (TREE_CODE (op0) == LABEL_DECL)
2099
        expand_goto (op0);
2100
      else
2101
        expand_computed_goto (op0);
2102
      break;
2103
    case GIMPLE_LABEL:
2104
      expand_label (gimple_label_label (stmt));
2105
      break;
2106
    case GIMPLE_NOP:
2107
    case GIMPLE_PREDICT:
2108
      break;
2109
    case GIMPLE_SWITCH:
2110
      expand_case (stmt);
2111
      break;
2112
    case GIMPLE_ASM:
2113
      expand_asm_stmt (stmt);
2114
      break;
2115
    case GIMPLE_CALL:
2116
      expand_call_stmt (stmt);
2117
      break;
2118
 
2119
    case GIMPLE_RETURN:
2120
      op0 = gimple_return_retval (stmt);
2121
 
2122
      if (op0 && op0 != error_mark_node)
2123
        {
2124
          tree result = DECL_RESULT (current_function_decl);
2125
 
2126
          /* If we are not returning the current function's RESULT_DECL,
2127
             build an assignment to it.  */
2128
          if (op0 != result)
2129
            {
2130
              /* I believe that a function's RESULT_DECL is unique.  */
2131
              gcc_assert (TREE_CODE (op0) != RESULT_DECL);
2132
 
2133
              /* ??? We'd like to simply use expand_assignment here,
2134
                 but this fails if the value is of BLKmode but the return
2135
                 decl is a register.  expand_return has special handling
2136
                 for this combination, which eventually should move
2137
                 to common code.  See comments there.  Until then, let's
2138
                 build a modify expression :-/  */
2139
              op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
2140
                            result, op0);
2141
            }
2142
        }
2143
      if (!op0)
2144
        expand_null_return ();
2145
      else
2146
        expand_return (op0);
2147
      break;
2148
 
2149
    case GIMPLE_ASSIGN:
2150
      {
2151
        tree lhs = gimple_assign_lhs (stmt);
2152
 
2153
        /* Tree expand used to fiddle with |= and &= of two bitfield
2154
           COMPONENT_REFs here.  This can't happen with gimple; the LHS
2155
           of binary assigns must be a gimple reg.  */
2156
 
2157
        if (TREE_CODE (lhs) != SSA_NAME
2158
            || get_gimple_rhs_class (gimple_expr_code (stmt))
2159
               == GIMPLE_SINGLE_RHS)
2160
          {
2161
            tree rhs = gimple_assign_rhs1 (stmt);
2162
            gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
2163
                        == GIMPLE_SINGLE_RHS);
2164
            if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
2165
              SET_EXPR_LOCATION (rhs, gimple_location (stmt));
2166
            if (TREE_CLOBBER_P (rhs))
2167
              /* This is a clobber to mark the going out of scope for
2168
                 this LHS.  */
2169
              ;
2170
            else
2171
              expand_assignment (lhs, rhs,
2172
                                 gimple_assign_nontemporal_move_p (stmt));
2173
          }
2174
        else
2175
          {
2176
            rtx target, temp;
2177
            bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
2178
            struct separate_ops ops;
2179
            bool promoted = false;
2180
 
2181
            target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
2182
            if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2183
              promoted = true;
2184
 
2185
            ops.code = gimple_assign_rhs_code (stmt);
2186
            ops.type = TREE_TYPE (lhs);
2187
            switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
2188
              {
2189
                case GIMPLE_TERNARY_RHS:
2190
                  ops.op2 = gimple_assign_rhs3 (stmt);
2191
                  /* Fallthru */
2192
                case GIMPLE_BINARY_RHS:
2193
                  ops.op1 = gimple_assign_rhs2 (stmt);
2194
                  /* Fallthru */
2195
                case GIMPLE_UNARY_RHS:
2196
                  ops.op0 = gimple_assign_rhs1 (stmt);
2197
                  break;
2198
                default:
2199
                  gcc_unreachable ();
2200
              }
2201
            ops.location = gimple_location (stmt);
2202
 
2203
            /* If we want to use a nontemporal store, force the value to
2204
               a register first.  If we store into a promoted register,
2205
               don't directly expand to target.  */
2206
            temp = nontemporal || promoted ? NULL_RTX : target;
2207
            temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
2208
                                       EXPAND_NORMAL);
2209
 
2210
            if (temp == target)
2211
              ;
2212
            else if (promoted)
2213
              {
2214
                int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target);
2215
                /* If TEMP is a VOIDmode constant, use convert_modes to make
2216
                   sure that we properly convert it.  */
2217
                if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
2218
                  {
2219
                    temp = convert_modes (GET_MODE (target),
2220
                                          TYPE_MODE (ops.type),
2221
                                          temp, unsignedp);
2222
                    temp = convert_modes (GET_MODE (SUBREG_REG (target)),
2223
                                          GET_MODE (target), temp, unsignedp);
2224
                  }
2225
 
2226
                convert_move (SUBREG_REG (target), temp, unsignedp);
2227
              }
2228
            else if (nontemporal && emit_storent_insn (target, temp))
2229
              ;
2230
            else
2231
              {
2232
                temp = force_operand (temp, target);
2233
                if (temp != target)
2234
                  emit_move_insn (target, temp);
2235
              }
2236
          }
2237
      }
2238
      break;
2239
 
2240
    default:
2241
      gcc_unreachable ();
2242
    }
2243
}
2244
 
2245
/* Expand one gimple statement STMT and return the last RTL instruction
2246
   before any of the newly generated ones.
2247
 
2248
   In addition to generating the necessary RTL instructions this also
2249
   sets REG_EH_REGION notes if necessary and sets the current source
2250
   location for diagnostics.  */
2251
 
2252
static rtx
2253
expand_gimple_stmt (gimple stmt)
2254
{
2255
  location_t saved_location = input_location;
2256
  rtx last = get_last_insn ();
2257
  int lp_nr;
2258
 
2259
  gcc_assert (cfun);
2260
 
2261
  /* We need to save and restore the current source location so that errors
2262
     discovered during expansion are emitted with the right location.  But
2263
     it would be better if the diagnostic routines used the source location
2264
     embedded in the tree nodes rather than globals.  */
2265
  if (gimple_has_location (stmt))
2266
    input_location = gimple_location (stmt);
2267
 
2268
  expand_gimple_stmt_1 (stmt);
2269
 
2270
  /* Free any temporaries used to evaluate this statement.  */
2271
  free_temp_slots ();
2272
 
2273
  input_location = saved_location;
2274
 
2275
  /* Mark all insns that may trap.  */
2276
  lp_nr = lookup_stmt_eh_lp (stmt);
2277
  if (lp_nr)
2278
    {
2279
      rtx insn;
2280
      for (insn = next_real_insn (last); insn;
2281
           insn = next_real_insn (insn))
2282
        {
2283
          if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
2284
              /* If we want exceptions for non-call insns, any
2285
                 may_trap_p instruction may throw.  */
2286
              && GET_CODE (PATTERN (insn)) != CLOBBER
2287
              && GET_CODE (PATTERN (insn)) != USE
2288
              && insn_could_throw_p (insn))
2289
            make_reg_eh_region_note (insn, 0, lp_nr);
2290
        }
2291
    }
2292
 
2293
  return last;
2294
}
2295
 
2296
/* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_CALL
2297
   that has CALL_EXPR_TAILCALL set.  Returns non-null if we actually
2298
   generated a tail call (something that might be denied by the ABI
2299
   rules governing the call; see calls.c).
2300
 
2301
   Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
2302
   can still reach the rest of BB.  The case here is __builtin_sqrt,
2303
   where the NaN result goes through the external function (with a
2304
   tailcall) and the normal result happens via a sqrt instruction.  */
2305
 
2306
static basic_block
2307
expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
2308
{
2309
  rtx last2, last;
2310
  edge e;
2311
  edge_iterator ei;
2312
  int probability;
2313
  gcov_type count;
2314
 
2315
  last2 = last = expand_gimple_stmt (stmt);
2316
 
2317
  for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
2318
    if (CALL_P (last) && SIBLING_CALL_P (last))
2319
      goto found;
2320
 
2321
  maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2322
 
2323
  *can_fallthru = true;
2324
  return NULL;
2325
 
2326
 found:
2327
  /* ??? Wouldn't it be better to just reset any pending stack adjust?
2328
     Any instructions emitted here are about to be deleted.  */
2329
  do_pending_stack_adjust ();
2330
 
2331
  /* Remove any non-eh, non-abnormal edges that don't go to exit.  */
2332
  /* ??? I.e. the fallthrough edge.  HOWEVER!  If there were to be
2333
     EH or abnormal edges, we shouldn't have created a tail call in
2334
     the first place.  So it seems to me we should just be removing
2335
     all edges here, or redirecting the existing fallthru edge to
2336
     the exit block.  */
2337
 
2338
  probability = 0;
2339
  count = 0;
2340
 
2341
  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2342
    {
2343
      if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
2344
        {
2345
          if (e->dest != EXIT_BLOCK_PTR)
2346
            {
2347
              e->dest->count -= e->count;
2348
              e->dest->frequency -= EDGE_FREQUENCY (e);
2349
              if (e->dest->count < 0)
2350
                e->dest->count = 0;
2351
              if (e->dest->frequency < 0)
2352
                e->dest->frequency = 0;
2353
            }
2354
          count += e->count;
2355
          probability += e->probability;
2356
          remove_edge (e);
2357
        }
2358
      else
2359
        ei_next (&ei);
2360
    }
2361
 
2362
  /* This is somewhat ugly: the call_expr expander often emits instructions
2363
     after the sibcall (to perform the function return).  These confuse the
2364
     find_many_sub_basic_blocks code, so we need to get rid of them.  */
2365
  last = NEXT_INSN (last);
2366
  gcc_assert (BARRIER_P (last));
2367
 
2368
  *can_fallthru = false;
2369
  while (NEXT_INSN (last))
2370
    {
2371
      /* For instance the sqrt builtin expander expands an if with a
2372
         sibcall in the then-branch and a label for the else-branch.  */
2373
      if (LABEL_P (NEXT_INSN (last)))
2374
        {
2375
          *can_fallthru = true;
2376
          break;
2377
        }
2378
      delete_insn (NEXT_INSN (last));
2379
    }
2380
 
2381
  e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
2382
  e->probability += probability;
2383
  e->count += count;
2384
  BB_END (bb) = last;
2385
  update_bb_for_insn (bb);
2386
 
2387
  if (NEXT_INSN (last))
2388
    {
2389
      bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2390
 
2391
      last = BB_END (bb);
2392
      if (BARRIER_P (last))
2393
        BB_END (bb) = PREV_INSN (last);
2394
    }
2395
 
2396
  maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2397
 
2398
  return bb;
2399
}
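
/* Editor's note: an illustrative sketch, not part of the original file,
   of the conditional tail call described in the comment before
   expand_gimple_tailcall above.  The function name is hypothetical;
   the block is fenced off with #if 0.  */
#if 0
/* With math-errno semantics in effect, the sqrt expander typically
   emits the hardware sqrt instruction for the common case and a
   (sibling) call to the C library for the negative/NaN path, so the
   expanded block contains both a tail call and a fallthrough.  */
static double
example_sqrt (double x)
{
  return __builtin_sqrt (x);
}
#endif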
2400
 
2401
/* Return the difference between the floor and the truncated result of
2402
   a signed division by OP1 with remainder MOD.  */
2403
static rtx
2404
floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2405
{
2406
  /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
2407
  return gen_rtx_IF_THEN_ELSE
2408
    (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2409
     gen_rtx_IF_THEN_ELSE
2410
     (mode, gen_rtx_LT (BImode,
2411
                        gen_rtx_DIV (mode, op1, mod),
2412
                        const0_rtx),
2413
      constm1_rtx, const0_rtx),
2414
     const0_rtx);
2415
}
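
/* Editor's note: an illustrative, standalone sketch, not part of the
   original file, of the floor-division adjustment built above, written
   in plain C arithmetic instead of RTL.  C division truncates toward
   zero; flooring subtracts 1 exactly when the remainder is nonzero and
   the operands have opposite signs (the RTL above tests the sign of
   op1 / mod, which is equivalent since MOD carries the sign of op0).
   Fenced off with #if 0.  */
#if 0
#include <assert.h>

static int
example_floor_div (int op0, int op1)
{
  int div = op0 / op1;          /* truncated quotient */
  int mod = op0 % op1;          /* remainder, same sign as op0 */
  int adj = (mod != 0 && (op0 < 0) != (op1 < 0)) ? -1 : 0;
  return div + adj;
}

static void
example_floor_div_check (void)
{
  assert (example_floor_div (7, 2) == 3);
  assert (example_floor_div (-7, 2) == -4);   /* truncation gives -3 */
  assert (example_floor_div (7, -2) == -4);
  assert (example_floor_div (-7, -2) == 3);
}
#endif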
2416
 
2417
/* Return the difference between the ceil and the truncated result of
2418
   a signed division by OP1 with remainder MOD.  */
2419
static rtx
2420
ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2421
{
2422
  /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
2423
  return gen_rtx_IF_THEN_ELSE
2424
    (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2425
     gen_rtx_IF_THEN_ELSE
2426
     (mode, gen_rtx_GT (BImode,
2427
                        gen_rtx_DIV (mode, op1, mod),
2428
                        const0_rtx),
2429
      const1_rtx, const0_rtx),
2430
     const0_rtx);
2431
}
2432
 
2433
/* Return the difference between the ceil and the truncated result of
2434
   an unsigned division by OP1 with remainder MOD.  */
2435
static rtx
2436
ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
2437
{
2438
  /* (mod != 0 ? 1 : 0) */
2439
  return gen_rtx_IF_THEN_ELSE
2440
    (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2441
     const1_rtx, const0_rtx);
2442
}
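
/* Editor's note: an illustrative, standalone sketch, not part of the
   original file, of the two ceiling adjustments above.  The unsigned
   form adds 1 whenever the remainder is nonzero; the signed form does
   so only when the truncated quotient is positive (operands of the
   same sign).  Fenced off with #if 0.  */
#if 0
#include <assert.h>

static unsigned int
example_ceil_udiv (unsigned int op0, unsigned int op1)
{
  return op0 / op1 + (op0 % op1 != 0 ? 1 : 0);
}

static int
example_ceil_sdiv (int op0, int op1)
{
  int mod = op0 % op1;
  int adj = (mod != 0 && (op0 < 0) == (op1 < 0)) ? 1 : 0;
  return op0 / op1 + adj;
}

static void
example_ceil_div_check (void)
{
  assert (example_ceil_udiv (7, 2) == 4);
  assert (example_ceil_sdiv (7, 2) == 4);
  assert (example_ceil_sdiv (-7, 2) == -3);   /* truncation already ceils */
}
#endif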
2443
 
2444
/* Return the difference between the rounded and the truncated result
2445
   of a signed division by OP1 with remainder MOD.  Halfway cases are
2446
   rounded away from zero, rather than to the nearest even number.  */
2447
static rtx
2448
round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2449
{
2450
  /* (abs (mod) >= abs (op1) - abs (mod)
2451
      ? (op1 / mod > 0 ? 1 : -1)
2452
      : 0) */
2453
  return gen_rtx_IF_THEN_ELSE
2454
    (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
2455
                       gen_rtx_MINUS (mode,
2456
                                      gen_rtx_ABS (mode, op1),
2457
                                      gen_rtx_ABS (mode, mod))),
2458
     gen_rtx_IF_THEN_ELSE
2459
     (mode, gen_rtx_GT (BImode,
2460
                        gen_rtx_DIV (mode, op1, mod),
2461
                        const0_rtx),
2462
      const1_rtx, constm1_rtx),
2463
     const0_rtx);
2464
}
2465
 
2466
/* Return the difference between the rounded and the truncated result
2467
   of an unsigned division by OP1 with remainder MOD.  Halfway cases
2468
   are rounded away from zero, rather than to the nearest even
2469
   number.  */
2470
static rtx
2471
round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2472
{
2473
  /* (mod >= op1 - mod ? 1 : 0) */
2474
  return gen_rtx_IF_THEN_ELSE
2475
    (mode, gen_rtx_GE (BImode, mod,
2476
                       gen_rtx_MINUS (mode, op1, mod)),
2477
     const1_rtx, const0_rtx);
2478
}
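
/* Editor's note: an illustrative, standalone sketch, not part of the
   original file, of the rounding adjustments above.  The truncated
   quotient is bumped one step away from zero when abs(mod) >=
   abs(op1) - abs(mod), so halfway cases round away from zero rather
   than to even.  Fenced off with #if 0.  */
#if 0
#include <assert.h>
#include <stdlib.h>

static unsigned int
example_round_udiv (unsigned int op0, unsigned int op1)
{
  unsigned int mod = op0 % op1;
  return op0 / op1 + (mod >= op1 - mod ? 1 : 0);
}

static int
example_round_sdiv (int op0, int op1)
{
  int div = op0 / op1;
  int mod = op0 % op1;
  if (abs (mod) >= abs (op1) - abs (mod))
    return div + ((op0 < 0) == (op1 < 0) ? 1 : -1);
  return div;
}

static void
example_round_div_check (void)
{
  assert (example_round_udiv (7, 2) == 4);    /* 3.5 rounds up */
  assert (example_round_udiv (5, 3) == 2);
  assert (example_round_sdiv (-7, 2) == -4);  /* away from zero */
  assert (example_round_sdiv (7, 3) == 2);
}
#endif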
2479
 
2480
/* Convert X to MODE, that must be Pmode or ptr_mode, without emitting
2481
   any rtl.  */
2482
 
2483
static rtx
2484
convert_debug_memory_address (enum machine_mode mode, rtx x,
2485
                              addr_space_t as)
2486
{
2487
  enum machine_mode xmode = GET_MODE (x);
2488
 
2489
#ifndef POINTERS_EXTEND_UNSIGNED
2490
  gcc_assert (mode == Pmode
2491
              || mode == targetm.addr_space.address_mode (as));
2492
  gcc_assert (xmode == mode || xmode == VOIDmode);
2493
#else
2494
  rtx temp;
2495
 
2496
  gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
2497
 
2498
  if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
2499
    return x;
2500
 
2501
  if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
2502
    x = simplify_gen_subreg (mode, x, xmode,
2503
                             subreg_lowpart_offset
2504
                             (mode, xmode));
2505
  else if (POINTERS_EXTEND_UNSIGNED > 0)
2506
    x = gen_rtx_ZERO_EXTEND (mode, x);
2507
  else if (!POINTERS_EXTEND_UNSIGNED)
2508
    x = gen_rtx_SIGN_EXTEND (mode, x);
2509
  else
2510
    {
2511
      switch (GET_CODE (x))
2512
        {
2513
        case SUBREG:
2514
          if ((SUBREG_PROMOTED_VAR_P (x)
2515
               || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
2516
               || (GET_CODE (SUBREG_REG (x)) == PLUS
2517
                   && REG_P (XEXP (SUBREG_REG (x), 0))
2518
                   && REG_POINTER (XEXP (SUBREG_REG (x), 0))
2519
                   && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
2520
              && GET_MODE (SUBREG_REG (x)) == mode)
2521
            return SUBREG_REG (x);
2522
          break;
2523
        case LABEL_REF:
2524
          temp = gen_rtx_LABEL_REF (mode, XEXP (x, 0));
2525
          LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
2526
          return temp;
2527
        case SYMBOL_REF:
2528
          temp = shallow_copy_rtx (x);
2529
          PUT_MODE (temp, mode);
2530
          return temp;
2531
        case CONST:
2532
          temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2533
          if (temp)
2534
            temp = gen_rtx_CONST (mode, temp);
2535
          return temp;
2536
        case PLUS:
2537
        case MINUS:
2538
          if (CONST_INT_P (XEXP (x, 1)))
2539
            {
2540
              temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2541
              if (temp)
2542
                return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
2543
            }
2544
          break;
2545
        default:
2546
          break;
2547
        }
2548
      /* Don't know how to express ptr_extend as an operation in debug info.  */
2549
      return NULL;
2550
    }
2551
#endif /* POINTERS_EXTEND_UNSIGNED */
2552
 
2553
  return x;
2554
}
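
/* Editor's note: an illustrative sketch, not part of the original file,
   of the widening choice made above when POINTERS_EXTEND_UNSIGNED is
   defined: a positive value selects ZERO_EXTEND, zero selects
   SIGN_EXTEND (the negative "ptr_extend" case is target-specific and
   not modelled here).  Fenced off with #if 0.  */
#if 0
#include <assert.h>
#include <stdint.h>

static uint64_t
example_extend_address (uint32_t addr, int pointers_extend_unsigned)
{
  if (pointers_extend_unsigned > 0)
    return (uint64_t) addr;                        /* ZERO_EXTEND */
  return (uint64_t) (int64_t) (int32_t) addr;      /* SIGN_EXTEND */
}

static void
example_extend_address_check (void)
{
  assert (example_extend_address (0x80000000u, 1) == 0x80000000ull);
  assert (example_extend_address (0x80000000u, 0) == 0xffffffff80000000ull);
}
#endif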
2555
 
2556
/* Return an RTX equivalent to the value of the parameter DECL.  */
2557
 
2558
static rtx
2559
expand_debug_parm_decl (tree decl)
2560
{
2561
  rtx incoming = DECL_INCOMING_RTL (decl);
2562
 
2563
  if (incoming
2564
      && GET_MODE (incoming) != BLKmode
2565
      && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
2566
          || (MEM_P (incoming)
2567
              && REG_P (XEXP (incoming, 0))
2568
              && HARD_REGISTER_P (XEXP (incoming, 0)))))
2569
    {
2570
      rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
2571
 
2572
#ifdef HAVE_window_save
2573
      /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
2574
         If the target machine has an explicit window save instruction, the
2575
         actual entry value is the corresponding OUTGOING_REGNO instead.  */
2576
      if (REG_P (incoming)
2577
          && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
2578
        incoming
2579
          = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
2580
                                OUTGOING_REGNO (REGNO (incoming)), 0);
2581
      else if (MEM_P (incoming))
2582
        {
2583
          rtx reg = XEXP (incoming, 0);
2584
          if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
2585
            {
2586
              reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
2587
              incoming = replace_equiv_address_nv (incoming, reg);
2588
            }
2589
        }
2590
#endif
2591
 
2592
      ENTRY_VALUE_EXP (rtl) = incoming;
2593
      return rtl;
2594
    }
2595
 
2596
  if (incoming
2597
      && GET_MODE (incoming) != BLKmode
2598
      && !TREE_ADDRESSABLE (decl)
2599
      && MEM_P (incoming)
2600
      && (XEXP (incoming, 0) == virtual_incoming_args_rtx
2601
          || (GET_CODE (XEXP (incoming, 0)) == PLUS
2602
              && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
2603
              && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
2604
    return incoming;
2605
 
2606
  return NULL_RTX;
2607
}
2608
 
2609
/* Return an RTX equivalent to the value of the tree expression EXP.  */
2610
 
2611
static rtx
2612
expand_debug_expr (tree exp)
2613
{
2614
  rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
2615
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2616
  enum machine_mode inner_mode = VOIDmode;
2617
  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
2618
  addr_space_t as;
2619
 
2620
  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2621
    {
2622
    case tcc_expression:
2623
      switch (TREE_CODE (exp))
2624
        {
2625
        case COND_EXPR:
2626
        case DOT_PROD_EXPR:
2627
        case WIDEN_MULT_PLUS_EXPR:
2628
        case WIDEN_MULT_MINUS_EXPR:
2629
        case FMA_EXPR:
2630
          goto ternary;
2631
 
2632
        case TRUTH_ANDIF_EXPR:
2633
        case TRUTH_ORIF_EXPR:
2634
        case TRUTH_AND_EXPR:
2635
        case TRUTH_OR_EXPR:
2636
        case TRUTH_XOR_EXPR:
2637
          goto binary;
2638
 
2639
        case TRUTH_NOT_EXPR:
2640
          goto unary;
2641
 
2642
        default:
2643
          break;
2644
        }
2645
      break;
2646
 
2647
    ternary:
2648
      op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
2649
      if (!op2)
2650
        return NULL_RTX;
2651
      /* Fall through.  */
2652
 
2653
    binary:
2654
    case tcc_binary:
2655
    case tcc_comparison:
2656
      op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2657
      if (!op1)
2658
        return NULL_RTX;
2659
      /* Fall through.  */
2660
 
2661
    unary:
2662
    case tcc_unary:
2663
      inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2664
      op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2665
      if (!op0)
2666
        return NULL_RTX;
2667
      break;
2668
 
2669
    case tcc_type:
2670
    case tcc_statement:
2671
      gcc_unreachable ();
2672
 
2673
    case tcc_constant:
2674
    case tcc_exceptional:
2675
    case tcc_declaration:
2676
    case tcc_reference:
2677
    case tcc_vl_exp:
2678
      break;
2679
    }
2680
 
2681
  switch (TREE_CODE (exp))
2682
    {
2683
    case STRING_CST:
2684
      if (!lookup_constant_def (exp))
2685
        {
2686
          if (strlen (TREE_STRING_POINTER (exp)) + 1
2687
              != (size_t) TREE_STRING_LENGTH (exp))
2688
            return NULL_RTX;
2689
          op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
2690
          op0 = gen_rtx_MEM (BLKmode, op0);
2691
          set_mem_attributes (op0, exp, 0);
2692
          return op0;
2693
        }
2694
      /* Fall through...  */
2695
 
2696
    case INTEGER_CST:
2697
    case REAL_CST:
2698
    case FIXED_CST:
2699
      op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
2700
      return op0;
2701
 
2702
    case COMPLEX_CST:
2703
      gcc_assert (COMPLEX_MODE_P (mode));
2704
      op0 = expand_debug_expr (TREE_REALPART (exp));
2705
      op1 = expand_debug_expr (TREE_IMAGPART (exp));
2706
      return gen_rtx_CONCAT (mode, op0, op1);
2707
 
2708
    case DEBUG_EXPR_DECL:
2709
      op0 = DECL_RTL_IF_SET (exp);
2710
 
2711
      if (op0)
2712
        return op0;
2713
 
2714
      op0 = gen_rtx_DEBUG_EXPR (mode);
2715
      DEBUG_EXPR_TREE_DECL (op0) = exp;
2716
      SET_DECL_RTL (exp, op0);
2717
 
2718
      return op0;
2719
 
2720
    case VAR_DECL:
2721
    case PARM_DECL:
2722
    case FUNCTION_DECL:
2723
    case LABEL_DECL:
2724
    case CONST_DECL:
2725
    case RESULT_DECL:
2726
      op0 = DECL_RTL_IF_SET (exp);
2727
 
2728
      /* This decl was probably optimized away.  */
2729
      if (!op0)
2730
        {
2731
          if (TREE_CODE (exp) != VAR_DECL
2732
              || DECL_EXTERNAL (exp)
2733
              || !TREE_STATIC (exp)
2734
              || !DECL_NAME (exp)
2735
              || DECL_HARD_REGISTER (exp)
2736
              || DECL_IN_CONSTANT_POOL (exp)
2737
              || mode == VOIDmode)
2738
            return NULL;
2739
 
2740
          op0 = make_decl_rtl_for_debug (exp);
2741
          if (!MEM_P (op0)
2742
              || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
2743
              || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
2744
            return NULL;
2745
        }
2746
      else
2747
        op0 = copy_rtx (op0);
2748
 
2749
      if (GET_MODE (op0) == BLKmode
2750
          /* If op0 is not BLKmode, but MODE is, adjust_mode
2751
             below would ICE.  While it is likely a FE bug,
2752
             try to be robust here.  See PR43166.  */
2753
          || mode == BLKmode
2754
          || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
2755
        {
2756
          gcc_assert (MEM_P (op0));
2757
          op0 = adjust_address_nv (op0, mode, 0);
2758
          return op0;
2759
        }
2760
 
2761
      /* Fall through.  */
2762
 
2763
    adjust_mode:
2764
    case PAREN_EXPR:
2765
    case NOP_EXPR:
2766
    case CONVERT_EXPR:
2767
      {
2768
        inner_mode = GET_MODE (op0);
2769
 
2770
        if (mode == inner_mode)
2771
          return op0;
2772
 
2773
        if (inner_mode == VOIDmode)
2774
          {
2775
            if (TREE_CODE (exp) == SSA_NAME)
2776
              inner_mode = TYPE_MODE (TREE_TYPE (exp));
2777
            else
2778
              inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2779
            if (mode == inner_mode)
2780
              return op0;
2781
          }
2782
 
2783
        if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
2784
          {
2785
            if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
2786
              op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
2787
            else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
2788
              op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
2789
            else
2790
              op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
2791
          }
2792
        else if (FLOAT_MODE_P (mode))
2793
          {
2794
            gcc_assert (TREE_CODE (exp) != SSA_NAME);
2795
            if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
2796
              op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
2797
            else
2798
              op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
2799
          }
2800
        else if (FLOAT_MODE_P (inner_mode))
2801
          {
2802
            if (unsignedp)
2803
              op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
2804
            else
2805
              op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
2806
          }
2807
        else if (CONSTANT_P (op0)
2808
                 || GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (inner_mode))
2809
          op0 = simplify_gen_subreg (mode, op0, inner_mode,
2810
                                     subreg_lowpart_offset (mode,
2811
                                                            inner_mode));
2812
        else if (TREE_CODE_CLASS (TREE_CODE (exp)) == tcc_unary
2813
                 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
2814
                 : unsignedp)
2815
          op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
2816
        else
2817
          op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
2818
 
2819
        return op0;
2820
      }
2821
 
2822
    case MEM_REF:
2823
      if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2824
        {
2825
          tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
2826
                                     TREE_OPERAND (exp, 0),
2827
                                     TREE_OPERAND (exp, 1));
2828
          if (newexp)
2829
            return expand_debug_expr (newexp);
2830
        }
2831
      /* FALLTHROUGH */
2832
    case INDIRECT_REF:
2833
      op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2834
      if (!op0)
2835
        return NULL;
2836
 
2837
      if (TREE_CODE (exp) == MEM_REF)
2838
        {
2839
          if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
2840
              || (GET_CODE (op0) == PLUS
2841
                  && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
2842
            /* (mem (debug_implicit_ptr)) might confuse aliasing.
2843
               Instead just use get_inner_reference.  */
2844
            goto component_ref;
2845
 
2846
          op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2847
          if (!op1 || !CONST_INT_P (op1))
2848
            return NULL;
2849
 
2850
          op0 = plus_constant (op0, INTVAL (op1));
2851
        }
2852
 
2853
      if (POINTER_TYPE_P (TREE_TYPE (exp)))
2854
        as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2855
      else
2856
        as = ADDR_SPACE_GENERIC;
2857
 
2858
      op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2859
                                          op0, as);
2860
      if (op0 == NULL_RTX)
2861
        return NULL;
2862
 
2863
      op0 = gen_rtx_MEM (mode, op0);
2864
      set_mem_attributes (op0, exp, 0);
2865
      if (TREE_CODE (exp) == MEM_REF
2866
          && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2867
        set_mem_expr (op0, NULL_TREE);
2868
      set_mem_addr_space (op0, as);
2869
 
2870
      return op0;
2871
 
2872
    case TARGET_MEM_REF:
2873
      if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
2874
          && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
2875
        return NULL;
2876
 
2877
      op0 = expand_debug_expr
2878
            (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
2879
      if (!op0)
2880
        return NULL;
2881
 
2882
      if (POINTER_TYPE_P (TREE_TYPE (exp)))
2883
        as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2884
      else
2885
        as = ADDR_SPACE_GENERIC;
2886
 
2887
      op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2888
                                          op0, as);
2889
      if (op0 == NULL_RTX)
2890
        return NULL;
2891
 
2892
      op0 = gen_rtx_MEM (mode, op0);
2893
 
2894
      set_mem_attributes (op0, exp, 0);
2895
      set_mem_addr_space (op0, as);
2896
 
2897
      return op0;
2898
 
2899
    component_ref:
2900
    case ARRAY_REF:
2901
    case ARRAY_RANGE_REF:
2902
    case COMPONENT_REF:
2903
    case BIT_FIELD_REF:
2904
    case REALPART_EXPR:
2905
    case IMAGPART_EXPR:
2906
    case VIEW_CONVERT_EXPR:
2907
      {
2908
        enum machine_mode mode1;
2909
        HOST_WIDE_INT bitsize, bitpos;
2910
        tree offset;
2911
        int volatilep = 0;
2912
        tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
2913
                                        &mode1, &unsignedp, &volatilep, false);
2914
        rtx orig_op0;
2915
 
2916
        if (bitsize == 0)
2917
          return NULL;
2918
 
2919
        orig_op0 = op0 = expand_debug_expr (tem);
2920
 
2921
        if (!op0)
2922
          return NULL;
2923
 
2924
        if (offset)
2925
          {
2926
            enum machine_mode addrmode, offmode;
2927
 
2928
            if (!MEM_P (op0))
2929
              return NULL;
2930
 
2931
            op0 = XEXP (op0, 0);
2932
            addrmode = GET_MODE (op0);
2933
            if (addrmode == VOIDmode)
2934
              addrmode = Pmode;
2935
 
2936
            op1 = expand_debug_expr (offset);
2937
            if (!op1)
2938
              return NULL;
2939
 
2940
            offmode = GET_MODE (op1);
2941
            if (offmode == VOIDmode)
2942
              offmode = TYPE_MODE (TREE_TYPE (offset));
2943
 
2944
            if (addrmode != offmode)
2945
              op1 = simplify_gen_subreg (addrmode, op1, offmode,
2946
                                         subreg_lowpart_offset (addrmode,
2947
                                                                offmode));
2948
 
2949
            /* Don't use offset_address here; we don't need a
2950
               recognizable address, and we don't want to generate
2951
               code.  */
2952
            op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
2953
                                                          op0, op1));
2954
          }
2955
 
2956
        if (MEM_P (op0))
2957
          {
2958
            if (mode1 == VOIDmode)
2959
              /* Bitfield.  */
2960
              mode1 = smallest_mode_for_size (bitsize, MODE_INT);
2961
            if (bitpos >= BITS_PER_UNIT)
2962
              {
2963
                op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
2964
                bitpos %= BITS_PER_UNIT;
2965
              }
2966
            else if (bitpos < 0)
2967
              {
2968
                HOST_WIDE_INT units
2969
                  = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
2970
                op0 = adjust_address_nv (op0, mode1, units);
2971
                bitpos += units * BITS_PER_UNIT;
2972
              }
2973
            else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
2974
              op0 = adjust_address_nv (op0, mode, 0);
2975
            else if (GET_MODE (op0) != mode1)
2976
              op0 = adjust_address_nv (op0, mode1, 0);
2977
            else
2978
              op0 = copy_rtx (op0);
2979
            if (op0 == orig_op0)
2980
              op0 = shallow_copy_rtx (op0);
2981
            set_mem_attributes (op0, exp, 0);
2982
          }
2983
 
2984
        if (bitpos == 0 && mode == GET_MODE (op0))
2985
          return op0;
2986
 
2987
        if (bitpos < 0)
2988
          return NULL;
2989
 
2990
        if (GET_MODE (op0) == BLKmode)
2991
          return NULL;
2992
 
2993
        if ((bitpos % BITS_PER_UNIT) == 0
2994
            && bitsize == GET_MODE_BITSIZE (mode1))
2995
          {
2996
            enum machine_mode opmode = GET_MODE (op0);
2997
 
2998
            if (opmode == VOIDmode)
2999
              opmode = TYPE_MODE (TREE_TYPE (tem));
3000
 
3001
            /* This condition may hold if we're expanding the address
3002
               right past the end of an array that turned out not to
3003
               be addressable (i.e., the address was only computed in
3004
               debug stmts).  The gen_subreg below would rightfully
3005
               crash, and the address doesn't really exist, so just
3006
               drop it.  */
3007
            if (bitpos >= GET_MODE_BITSIZE (opmode))
3008
              return NULL;
3009
 
3010
            if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
3011
              return simplify_gen_subreg (mode, op0, opmode,
3012
                                          bitpos / BITS_PER_UNIT);
3013
          }
3014
 
3015
        return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
3016
                                     && TYPE_UNSIGNED (TREE_TYPE (exp))
3017
                                     ? SIGN_EXTRACT
3018
                                     : ZERO_EXTRACT, mode,
3019
                                     GET_MODE (op0) != VOIDmode
3020
                                     ? GET_MODE (op0)
3021
                                     : TYPE_MODE (TREE_TYPE (tem)),
3022
                                     op0, GEN_INT (bitsize), GEN_INT (bitpos));
3023
      }
3024
 
3025
    case ABS_EXPR:
3026
      return simplify_gen_unary (ABS, mode, op0, mode);
3027
 
3028
    case NEGATE_EXPR:
3029
      return simplify_gen_unary (NEG, mode, op0, mode);
3030
 
3031
    case BIT_NOT_EXPR:
3032
      return simplify_gen_unary (NOT, mode, op0, mode);
3033
 
3034
    case FLOAT_EXPR:
3035
      return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3036
                                                                         0)))
3037
                                 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
3038
                                 inner_mode);
3039
 
3040
    case FIX_TRUNC_EXPR:
3041
      return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
3042
                                 inner_mode);
3043
 
3044
    case POINTER_PLUS_EXPR:
3045
      /* For the rare target where pointers are not the same size as
3046
         size_t, we need to check for mis-matched modes and correct
3047
         the addend.  */
3048
      if (op0 && op1
3049
          && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
3050
          && GET_MODE (op0) != GET_MODE (op1))
3051
        {
3052
          if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1)))
3053
            op1 = simplify_gen_unary (TRUNCATE, GET_MODE (op0), op1,
3054
                                      GET_MODE (op1));
3055
          else
3056
            /* We always sign-extend, regardless of the signedness of
3057
               the operand, because the operand is always unsigned
3058
               here even if the original C expression is signed.  */
3059
            op1 = simplify_gen_unary (SIGN_EXTEND, GET_MODE (op0), op1,
3060
                                      GET_MODE (op1));
3061
        }
3062
      /* Fall through.  */
3063
    case PLUS_EXPR:
3064
      return simplify_gen_binary (PLUS, mode, op0, op1);
3065
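    /* Illustrative sketch (not part of this pass): the POINTER_PLUS_EXPR
       fix-up above narrows (or, in the opposite case, sign-extends) the
       addend so that both operands of the PLUS share the pointer's mode.
       For a hypothetical target with 32-bit pointers and a 64-bit sizetype,
       the resulting debug expression denotes roughly:  */
#if 0
#include <stdint.h>

static uint32_t
pointer_plus_sketch (uint32_t ptr, uint64_t offset)
{
  /* TRUNCATE the wider addend to the pointer's width, then add.  */
  return ptr + (uint32_t) offset;
}
#endif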
 
3066
    case MINUS_EXPR:
3067
      return simplify_gen_binary (MINUS, mode, op0, op1);
3068
 
3069
    case MULT_EXPR:
3070
      return simplify_gen_binary (MULT, mode, op0, op1);
3071
 
3072
    case RDIV_EXPR:
3073
    case TRUNC_DIV_EXPR:
3074
    case EXACT_DIV_EXPR:
3075
      if (unsignedp)
3076
        return simplify_gen_binary (UDIV, mode, op0, op1);
3077
      else
3078
        return simplify_gen_binary (DIV, mode, op0, op1);
3079
 
3080
    case TRUNC_MOD_EXPR:
3081
      return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
3082
 
3083
    case FLOOR_DIV_EXPR:
3084
      if (unsignedp)
3085
        return simplify_gen_binary (UDIV, mode, op0, op1);
3086
      else
3087
        {
3088
          rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3089
          rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3090
          rtx adj = floor_sdiv_adjust (mode, mod, op1);
3091
          return simplify_gen_binary (PLUS, mode, div, adj);
3092
        }
3093
 
3094
    case FLOOR_MOD_EXPR:
3095
      if (unsignedp)
3096
        return simplify_gen_binary (UMOD, mode, op0, op1);
3097
      else
3098
        {
3099
          rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3100
          rtx adj = floor_sdiv_adjust (mode, mod, op1);
3101
          adj = simplify_gen_unary (NEG, mode,
3102
                                    simplify_gen_binary (MULT, mode, adj, op1),
3103
                                    mode);
3104
          return simplify_gen_binary (PLUS, mode, mod, adj);
3105
        }
3106
 
3107
    case CEIL_DIV_EXPR:
3108
      if (unsignedp)
3109
        {
3110
          rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
3111
          rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3112
          rtx adj = ceil_udiv_adjust (mode, mod, op1);
3113
          return simplify_gen_binary (PLUS, mode, div, adj);
3114
        }
3115
      else
3116
        {
3117
          rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3118
          rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3119
          rtx adj = ceil_sdiv_adjust (mode, mod, op1);
3120
          return simplify_gen_binary (PLUS, mode, div, adj);
3121
        }
3122
 
3123
    case CEIL_MOD_EXPR:
3124
      if (unsignedp)
3125
        {
3126
          rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3127
          rtx adj = ceil_udiv_adjust (mode, mod, op1);
3128
          adj = simplify_gen_unary (NEG, mode,
3129
                                    simplify_gen_binary (MULT, mode, adj, op1),
3130
                                    mode);
3131
          return simplify_gen_binary (PLUS, mode, mod, adj);
3132
        }
3133
      else
3134
        {
3135
          rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3136
          rtx adj = ceil_sdiv_adjust (mode, mod, op1);
3137
          adj = simplify_gen_unary (NEG, mode,
3138
                                    simplify_gen_binary (MULT, mode, adj, op1),
3139
                                    mode);
3140
          return simplify_gen_binary (PLUS, mode, mod, adj);
3141
        }
3142
 
3143
    case ROUND_DIV_EXPR:
3144
      if (unsignedp)
3145
        {
3146
          rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
3147
          rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3148
          rtx adj = round_udiv_adjust (mode, mod, op1);
3149
          return simplify_gen_binary (PLUS, mode, div, adj);
3150
        }
3151
      else
3152
        {
3153
          rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3154
          rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3155
          rtx adj = round_sdiv_adjust (mode, mod, op1);
3156
          return simplify_gen_binary (PLUS, mode, div, adj);
3157
        }
3158
 
3159
    case ROUND_MOD_EXPR:
3160
      if (unsignedp)
3161
        {
3162
          rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3163
          rtx adj = round_udiv_adjust (mode, mod, op1);
3164
          adj = simplify_gen_unary (NEG, mode,
3165
                                    simplify_gen_binary (MULT, mode, adj, op1),
3166
                                    mode);
3167
          return simplify_gen_binary (PLUS, mode, mod, adj);
3168
        }
3169
      else
3170
        {
3171
          rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3172
          rtx adj = round_sdiv_adjust (mode, mod, op1);
3173
          adj = simplify_gen_unary (NEG, mode,
3174
                                    simplify_gen_binary (MULT, mode, adj, op1),
3175
                                    mode);
3176
          return simplify_gen_binary (PLUS, mode, mod, adj);
3177
        }
3178
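    /* Illustrative sketch (not part of this pass): the FLOOR_/CEIL_/ROUND_
       cases above rebuild the requested rounding from the truncating
       DIV/MOD that RTL provides plus an adjustment term (computed
       symbolically by the *_sdiv_adjust/*_udiv_adjust helpers).  The signed
       floor case corresponds to this standalone computation:  */
#if 0
static long
floor_div_sketch (long a, long b)
{
  long div = a / b;   /* truncating division, like the DIV rtx  */
  long mod = a % b;   /* matching remainder, like the MOD rtx   */
  /* Adjust by -1 when the remainder is nonzero and its sign differs
     from the divisor's, i.e. when truncation rounded towards zero
     instead of towards negative infinity.  */
  long adj = (mod != 0 && ((mod < 0) != (b < 0))) ? -1 : 0;
  return div + adj;
}
#endif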
 
3179
    case LSHIFT_EXPR:
3180
      return simplify_gen_binary (ASHIFT, mode, op0, op1);
3181
 
3182
    case RSHIFT_EXPR:
3183
      if (unsignedp)
3184
        return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
3185
      else
3186
        return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
3187
 
3188
    case LROTATE_EXPR:
3189
      return simplify_gen_binary (ROTATE, mode, op0, op1);
3190
 
3191
    case RROTATE_EXPR:
3192
      return simplify_gen_binary (ROTATERT, mode, op0, op1);
3193
 
3194
    case MIN_EXPR:
3195
      return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
3196
 
3197
    case MAX_EXPR:
3198
      return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
3199
 
3200
    case BIT_AND_EXPR:
3201
    case TRUTH_AND_EXPR:
3202
      return simplify_gen_binary (AND, mode, op0, op1);
3203
 
3204
    case BIT_IOR_EXPR:
3205
    case TRUTH_OR_EXPR:
3206
      return simplify_gen_binary (IOR, mode, op0, op1);
3207
 
3208
    case BIT_XOR_EXPR:
3209
    case TRUTH_XOR_EXPR:
3210
      return simplify_gen_binary (XOR, mode, op0, op1);
3211
 
3212
    case TRUTH_ANDIF_EXPR:
3213
      return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
3214
 
3215
    case TRUTH_ORIF_EXPR:
3216
      return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
3217
 
3218
    case TRUTH_NOT_EXPR:
3219
      return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
3220
 
3221
    case LT_EXPR:
3222
      return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
3223
                                      op0, op1);
3224
 
3225
    case LE_EXPR:
3226
      return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
3227
                                      op0, op1);
3228
 
3229
    case GT_EXPR:
3230
      return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
3231
                                      op0, op1);
3232
 
3233
    case GE_EXPR:
3234
      return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
3235
                                      op0, op1);
3236
 
3237
    case EQ_EXPR:
3238
      return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
3239
 
3240
    case NE_EXPR:
3241
      return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
3242
 
3243
    case UNORDERED_EXPR:
3244
      return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
3245
 
3246
    case ORDERED_EXPR:
3247
      return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
3248
 
3249
    case UNLT_EXPR:
3250
      return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
3251
 
3252
    case UNLE_EXPR:
3253
      return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
3254
 
3255
    case UNGT_EXPR:
3256
      return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
3257
 
3258
    case UNGE_EXPR:
3259
      return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
3260
 
3261
    case UNEQ_EXPR:
3262
      return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
3263
 
3264
    case LTGT_EXPR:
3265
      return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
3266
 
3267
    case COND_EXPR:
3268
      return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
3269
 
3270
    case COMPLEX_EXPR:
3271
      gcc_assert (COMPLEX_MODE_P (mode));
3272
      if (GET_MODE (op0) == VOIDmode)
3273
        op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
3274
      if (GET_MODE (op1) == VOIDmode)
3275
        op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
3276
      return gen_rtx_CONCAT (mode, op0, op1);
3277
 
3278
    case CONJ_EXPR:
3279
      if (GET_CODE (op0) == CONCAT)
3280
        return gen_rtx_CONCAT (mode, XEXP (op0, 0),
3281
                               simplify_gen_unary (NEG, GET_MODE_INNER (mode),
3282
                                                   XEXP (op0, 1),
3283
                                                   GET_MODE_INNER (mode)));
3284
      else
3285
        {
3286
          enum machine_mode imode = GET_MODE_INNER (mode);
3287
          rtx re, im;
3288
 
3289
          if (MEM_P (op0))
3290
            {
3291
              re = adjust_address_nv (op0, imode, 0);
3292
              im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
3293
            }
3294
          else
3295
            {
3296
              enum machine_mode ifmode = int_mode_for_mode (mode);
3297
              enum machine_mode ihmode = int_mode_for_mode (imode);
3298
              rtx halfsize;
3299
              if (ifmode == BLKmode || ihmode == BLKmode)
3300
                return NULL;
3301
              halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
3302
              re = op0;
3303
              if (mode != ifmode)
3304
                re = gen_rtx_SUBREG (ifmode, re, 0);
3305
              re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
3306
              if (imode != ihmode)
3307
                re = gen_rtx_SUBREG (imode, re, 0);
3308
              im = copy_rtx (op0);
3309
              if (mode != ifmode)
3310
                im = gen_rtx_SUBREG (ifmode, im, 0);
3311
              im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
3312
              if (imode != ihmode)
3313
                im = gen_rtx_SUBREG (imode, im, 0);
3314
            }
3315
          im = gen_rtx_NEG (imode, im);
3316
          return gen_rtx_CONCAT (mode, re, im);
3317
        }
3318
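    /* Illustrative sketch (not part of this pass): for a non-MEM operand,
       the CONJ_EXPR code above views the whole complex value as one integer
       (ifmode) and pulls out its two halves with ZERO_EXTRACTs at bit 0 and
       at the half-size offset, then negates the second half.  With 32-bit
       components packed into a 64-bit container, that bit layout reads as
       follows (component order follows the ZERO_EXTRACT offsets above):  */
#if 0
#include <stdint.h>

static uint32_t
low_half_sketch (uint64_t whole)   /* the part extracted at bit 0 */
{
  return (uint32_t) whole;
}

static uint32_t
high_half_sketch (uint64_t whole)  /* the part extracted at bit 32 */
{
  return (uint32_t) (whole >> 32);
}
#endif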
 
3319
    case ADDR_EXPR:
3320
      op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3321
      if (!op0 || !MEM_P (op0))
3322
        {
3323
          if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3324
               || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
3325
               || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
3326
              && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
3327
                  || target_for_debug_bind (TREE_OPERAND (exp, 0))))
3328
            return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
3329
 
3330
          if (handled_component_p (TREE_OPERAND (exp, 0)))
3331
            {
3332
              HOST_WIDE_INT bitoffset, bitsize, maxsize;
3333
              tree decl
3334
                = get_ref_base_and_extent (TREE_OPERAND (exp, 0),
3335
                                           &bitoffset, &bitsize, &maxsize);
3336
              if ((TREE_CODE (decl) == VAR_DECL
3337
                   || TREE_CODE (decl) == PARM_DECL
3338
                   || TREE_CODE (decl) == RESULT_DECL)
3339
                  && (!TREE_ADDRESSABLE (decl)
3340
                      || target_for_debug_bind (decl))
3341
                  && (bitoffset % BITS_PER_UNIT) == 0
3342
                  && bitsize > 0
3343
                  && bitsize == maxsize)
3344
                return plus_constant (gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl),
3345
                                      bitoffset / BITS_PER_UNIT);
3346
            }
3347
 
3348
          return NULL;
3349
        }
3350
 
3351
      as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
3352
      op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as);
3353
 
3354
      return op0;
3355
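    /* Illustrative sketch (not part of this pass): when the ADDR_EXPR above
       wraps a handled component of a non-addressable decl, the result is a
       DEBUG_IMPLICIT_PTR to the decl plus a constant byte offset
       (bitoffset / BITS_PER_UNIT), the same arithmetic offsetof performs at
       the source level.  Struct and field names below are illustrative.  */
#if 0
#include <stddef.h>

struct sketch_pair { int first; int second; };

static size_t
second_byte_offset_sketch (void)
{
  /* &p->second is "implicit pointer to *p" plus this many bytes.  */
  return offsetof (struct sketch_pair, second);
}
#endif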
 
3356
    case VECTOR_CST:
3357
      exp = build_constructor_from_list (TREE_TYPE (exp),
3358
                                         TREE_VECTOR_CST_ELTS (exp));
3359
      /* Fall through.  */
3360
 
3361
    case CONSTRUCTOR:
3362
      if (TREE_CLOBBER_P (exp))
3363
        return NULL;
3364
      else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
3365
        {
3366
          unsigned i;
3367
          tree val;
3368
 
3369
          op0 = gen_rtx_CONCATN
3370
            (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
3371
 
3372
          FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
3373
            {
3374
              op1 = expand_debug_expr (val);
3375
              if (!op1)
3376
                return NULL;
3377
              XVECEXP (op0, 0, i) = op1;
3378
            }
3379
 
3380
          if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
3381
            {
3382
              op1 = expand_debug_expr
3383
                (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
3384
 
3385
              if (!op1)
3386
                return NULL;
3387
 
3388
              for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
3389
                XVECEXP (op0, 0, i) = op1;
3390
            }
3391
 
3392
          return op0;
3393
        }
3394
      else
3395
        goto flag_unsupported;
3396
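    /* Illustrative sketch (not part of this pass): the vector CONSTRUCTOR
       case above fills a CONCATN with the elements that are present and
       pads any remaining lanes with the element type's zero, mirroring the
       source-level rule that a partial initializer zero-fills the tail:  */
#if 0
static void
vector_pad_sketch (void)
{
  int lanes[4] = { 1, 2 };   /* lanes 2 and 3 are implicitly zero */
  (void) lanes;
}
#endif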
 
3397
    case CALL_EXPR:
3398
      /* ??? Maybe handle some builtins?  */
3399
      return NULL;
3400
 
3401
    case SSA_NAME:
3402
      {
3403
        gimple g = get_gimple_for_ssa_name (exp);
3404
        if (g)
3405
          {
3406
            op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
3407
            if (!op0)
3408
              return NULL;
3409
          }
3410
        else
3411
          {
3412
            int part = var_to_partition (SA.map, exp);
3413
 
3414
            if (part == NO_PARTITION)
3415
              {
3416
                /* If this is a reference to an incoming value of a
                   parameter that is never used in the code, or whose
                   incoming value is never used in the code, use the
                   PARM_DECL's DECL_RTL if set.  */
3420
                if (SSA_NAME_IS_DEFAULT_DEF (exp)
3421
                    && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL)
3422
                  {
3423
                    op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
3424
                    if (op0)
3425
                      goto adjust_mode;
3426
                    op0 = expand_debug_expr (SSA_NAME_VAR (exp));
3427
                    if (op0)
3428
                      goto adjust_mode;
3429
                  }
3430
                return NULL;
3431
              }
3432
 
3433
            gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
3434
 
3435
            op0 = copy_rtx (SA.partition_to_pseudo[part]);
3436
          }
3437
        goto adjust_mode;
3438
      }
3439
 
3440
    case ERROR_MARK:
3441
      return NULL;
3442
 
3443
    /* Vector stuff.  For most of the codes we don't have rtl codes.  */
3444
    case REALIGN_LOAD_EXPR:
3445
    case REDUC_MAX_EXPR:
3446
    case REDUC_MIN_EXPR:
3447
    case REDUC_PLUS_EXPR:
3448
    case VEC_COND_EXPR:
3449
    case VEC_LSHIFT_EXPR:
3450
    case VEC_PACK_FIX_TRUNC_EXPR:
3451
    case VEC_PACK_SAT_EXPR:
3452
    case VEC_PACK_TRUNC_EXPR:
3453
    case VEC_RSHIFT_EXPR:
3454
    case VEC_UNPACK_FLOAT_HI_EXPR:
3455
    case VEC_UNPACK_FLOAT_LO_EXPR:
3456
    case VEC_UNPACK_HI_EXPR:
3457
    case VEC_UNPACK_LO_EXPR:
3458
    case VEC_WIDEN_MULT_HI_EXPR:
3459
    case VEC_WIDEN_MULT_LO_EXPR:
3460
    case VEC_WIDEN_LSHIFT_HI_EXPR:
3461
    case VEC_WIDEN_LSHIFT_LO_EXPR:
3462
    case VEC_PERM_EXPR:
3463
      return NULL;
3464
 
3465
   /* Misc codes.  */
3466
    case ADDR_SPACE_CONVERT_EXPR:
3467
    case FIXED_CONVERT_EXPR:
3468
    case OBJ_TYPE_REF:
3469
    case WITH_SIZE_EXPR:
3470
      return NULL;
3471
 
3472
    case DOT_PROD_EXPR:
3473
      if (SCALAR_INT_MODE_P (GET_MODE (op0))
3474
          && SCALAR_INT_MODE_P (mode))
3475
        {
3476
          op0
3477
            = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3478
                                                                          0)))
3479
                                  ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
3480
                                  inner_mode);
3481
          op1
3482
            = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3483
                                                                          1)))
3484
                                  ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
3485
                                  inner_mode);
3486
          op0 = simplify_gen_binary (MULT, mode, op0, op1);
3487
          return simplify_gen_binary (PLUS, mode, op0, op2);
3488
        }
3489
      return NULL;
3490
 
3491
    case WIDEN_MULT_EXPR:
3492
    case WIDEN_MULT_PLUS_EXPR:
3493
    case WIDEN_MULT_MINUS_EXPR:
3494
      if (SCALAR_INT_MODE_P (GET_MODE (op0))
3495
          && SCALAR_INT_MODE_P (mode))
3496
        {
3497
          inner_mode = GET_MODE (op0);
3498
          if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3499
            op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3500
          else
3501
            op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3502
          if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
3503
            op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
3504
          else
3505
            op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
3506
          op0 = simplify_gen_binary (MULT, mode, op0, op1);
3507
          if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
3508
            return op0;
3509
          else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
3510
            return simplify_gen_binary (PLUS, mode, op0, op2);
3511
          else
3512
            return simplify_gen_binary (MINUS, mode, op2, op0);
3513
        }
3514
      return NULL;
3515
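    /* Illustrative sketch (not part of this pass): the WIDEN_MULT_* cases
       above first extend both operands to the wide result mode and only
       then multiply (and add or subtract the accumulator), so the product
       is computed at full width.  The signed flavor, with concrete widths:  */
#if 0
#include <stdint.h>

static int64_t
widen_mult_plus_sketch (int32_t a, int32_t b, int64_t acc)
{
  /* SIGN_EXTEND both operands, MULT in the wide mode, then PLUS.  */
  return (int64_t) a * (int64_t) b + acc;
}
#endif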
 
3516
    case WIDEN_SUM_EXPR:
3517
    case WIDEN_LSHIFT_EXPR:
3518
      if (SCALAR_INT_MODE_P (GET_MODE (op0))
3519
          && SCALAR_INT_MODE_P (mode))
3520
        {
3521
          op0
3522
            = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3523
                                                                          0)))
3524
                                  ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
3525
                                  inner_mode);
3526
          return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
3527
                                      ? ASHIFT : PLUS, mode, op0, op1);
3528
        }
3529
      return NULL;
3530
 
3531
    case FMA_EXPR:
3532
      return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
3533
 
3534
    default:
3535
    flag_unsupported:
3536
#ifdef ENABLE_CHECKING
3537
      debug_tree (exp);
3538
      gcc_unreachable ();
3539
#else
3540
      return NULL;
3541
#endif
3542
    }
3543
}
3544
 
3545
/* Return an RTX equivalent to the source bind value of the tree expression
   EXP.  */
3547
 
3548
static rtx
3549
expand_debug_source_expr (tree exp)
3550
{
3551
  rtx op0 = NULL_RTX;
3552
  enum machine_mode mode = VOIDmode, inner_mode;
3553
 
3554
  switch (TREE_CODE (exp))
3555
    {
3556
    case PARM_DECL:
3557
      {
3558
        mode = DECL_MODE (exp);
3559
        op0 = expand_debug_parm_decl (exp);
3560
        if (op0)
3561
           break;
3562
        /* See if this isn't an argument that has been completely
3563
           optimized out.  */
3564
        if (!DECL_RTL_SET_P (exp)
3565
            && !DECL_INCOMING_RTL (exp)
3566
            && DECL_ABSTRACT_ORIGIN (current_function_decl))
3567
          {
3568
            tree aexp = exp;
3569
            if (DECL_ABSTRACT_ORIGIN (exp))
3570
              aexp = DECL_ABSTRACT_ORIGIN (exp);
3571
            if (DECL_CONTEXT (aexp)
3572
                == DECL_ABSTRACT_ORIGIN (current_function_decl))
3573
              {
3574
                VEC(tree, gc) **debug_args;
3575
                unsigned int ix;
3576
                tree ddecl;
3577
#ifdef ENABLE_CHECKING
3578
                tree parm;
3579
                for (parm = DECL_ARGUMENTS (current_function_decl);
3580
                     parm; parm = DECL_CHAIN (parm))
3581
                  gcc_assert (parm != exp
3582
                              && DECL_ABSTRACT_ORIGIN (parm) != aexp);
3583
#endif
3584
                debug_args = decl_debug_args_lookup (current_function_decl);
3585
                if (debug_args != NULL)
3586
                  {
3587
                    for (ix = 0; VEC_iterate (tree, *debug_args, ix, ddecl);
3588
                         ix += 2)
3589
                      if (ddecl == aexp)
3590
                        return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
3591
                  }
3592
              }
3593
          }
3594
        break;
3595
      }
3596
    default:
3597
      break;
3598
    }
3599
 
3600
  if (op0 == NULL_RTX)
3601
    return NULL_RTX;
3602
 
3603
  inner_mode = GET_MODE (op0);
3604
  if (mode == inner_mode)
3605
    return op0;
3606
 
3607
  if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
3608
    {
3609
      if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
3610
        op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
3611
      else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
3612
        op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
3613
      else
3614
        op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
3615
    }
3616
  else if (FLOAT_MODE_P (mode))
3617
    gcc_unreachable ();
3618
  else if (FLOAT_MODE_P (inner_mode))
3619
    {
3620
      if (TYPE_UNSIGNED (TREE_TYPE (exp)))
3621
        op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
3622
      else
3623
        op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
3624
    }
3625
  else if (CONSTANT_P (op0)
3626
           || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
3627
    op0 = simplify_gen_subreg (mode, op0, inner_mode,
3628
                               subreg_lowpart_offset (mode, inner_mode));
3629
  else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
3630
    op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3631
  else
3632
    op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3633
 
3634
  return op0;
3635
}
3636
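/* Illustrative sketch (not part of this pass): decl_debug_args_lookup,
   used above, hands back a flat vector laid out as pairs
   (parameter decl, its debug value, parameter decl, its debug value, ...),
   which is why the search loop steps IX by two and compares only the even
   slots.  A hypothetical plain-array version of the same lookup:  */
#if 0
static int
find_debug_arg_sketch (void **vec, int n, void *decl)
{
  int ix;
  for (ix = 0; ix < n; ix += 2)   /* even slots hold the decls */
    if (vec[ix] == decl)
      return ix + 1;              /* the odd slot holds the paired value */
  return -1;
}
#endif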
 
3637
/* Expand the _LOCs in debug insns.  We run this after expanding all
   regular insns, so that any variables referenced in the function
   will have their DECL_RTLs set.  */
3640
 
3641
static void
3642
expand_debug_locations (void)
3643
{
3644
  rtx insn;
3645
  rtx last = get_last_insn ();
3646
  int save_strict_alias = flag_strict_aliasing;
3647
 
3648
  /* New alias sets created while setting up memory attributes cause
     -fcompare-debug failures, even though they don't bring about any
     codegen changes.  */
3651
  flag_strict_aliasing = 0;
3652
 
3653
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3654
    if (DEBUG_INSN_P (insn))
3655
      {
3656
        tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
3657
        rtx val;
3658
        enum machine_mode mode;
3659
 
3660
        if (value == NULL_TREE)
3661
          val = NULL_RTX;
3662
        else
3663
          {
3664
            if (INSN_VAR_LOCATION_STATUS (insn)
3665
                == VAR_INIT_STATUS_UNINITIALIZED)
3666
              val = expand_debug_source_expr (value);
3667
            else
3668
              val = expand_debug_expr (value);
3669
            gcc_assert (last == get_last_insn ());
3670
          }
3671
 
3672
        if (!val)
3673
          val = gen_rtx_UNKNOWN_VAR_LOC ();
3674
        else
3675
          {
3676
            mode = GET_MODE (INSN_VAR_LOCATION (insn));
3677
 
3678
            gcc_assert (mode == GET_MODE (val)
3679
                        || (GET_MODE (val) == VOIDmode
3680
                            && (CONST_INT_P (val)
3681
                                || GET_CODE (val) == CONST_FIXED
3682
                                || GET_CODE (val) == CONST_DOUBLE
3683
                                || GET_CODE (val) == LABEL_REF)));
3684
          }
3685
 
3686
        INSN_VAR_LOCATION_LOC (insn) = val;
3687
      }
3688
 
3689
  flag_strict_aliasing = save_strict_alias;
3690
}
3691
 
3692
/* Expand basic block BB from GIMPLE trees to RTL.  */
3693
 
3694
static basic_block
3695
expand_gimple_basic_block (basic_block bb)
3696
{
3697
  gimple_stmt_iterator gsi;
3698
  gimple_seq stmts;
3699
  gimple stmt = NULL;
3700
  rtx note, last;
3701
  edge e;
3702
  edge_iterator ei;
3703
  void **elt;
3704
 
3705
  if (dump_file)
3706
    fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
3707
             bb->index);
3708
 
3709
  /* Note that since we are now transitioning from GIMPLE to RTL, we
     cannot use the gsi_*_bb() routines because they expect the basic
     block to be in GIMPLE, instead of RTL.  Therefore, we need to
     access the BB sequence directly.  */
3713
  stmts = bb_seq (bb);
3714
  bb->il.gimple = NULL;
3715
  rtl_profile_for_bb (bb);
3716
  init_rtl_bb_info (bb);
3717
  bb->flags |= BB_RTL;
3718
 
3719
  /* Remove the RETURN_EXPR if we may fall through to the exit
     instead.  */
3721
  gsi = gsi_last (stmts);
3722
  if (!gsi_end_p (gsi)
3723
      && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
3724
    {
3725
      gimple ret_stmt = gsi_stmt (gsi);
3726
 
3727
      gcc_assert (single_succ_p (bb));
3728
      gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
3729
 
3730
      if (bb->next_bb == EXIT_BLOCK_PTR
3731
          && !gimple_return_retval (ret_stmt))
3732
        {
3733
          gsi_remove (&gsi, false);
3734
          single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
3735
        }
3736
    }
3737
 
3738
  gsi = gsi_start (stmts);
3739
  if (!gsi_end_p (gsi))
3740
    {
3741
      stmt = gsi_stmt (gsi);
3742
      if (gimple_code (stmt) != GIMPLE_LABEL)
3743
        stmt = NULL;
3744
    }
3745
 
3746
  elt = pointer_map_contains (lab_rtx_for_bb, bb);
3747
 
3748
  if (stmt || elt)
3749
    {
3750
      last = get_last_insn ();
3751
 
3752
      if (stmt)
3753
        {
3754
          expand_gimple_stmt (stmt);
3755
          gsi_next (&gsi);
3756
        }
3757
 
3758
      if (elt)
3759
        emit_label ((rtx) *elt);
3760
 
3761
      /* Java emits line number notes at the top of labels.
         ??? Make this go away once line number notes are obsoleted.  */
3763
      BB_HEAD (bb) = NEXT_INSN (last);
3764
      if (NOTE_P (BB_HEAD (bb)))
3765
        BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
3766
      note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
3767
 
3768
      maybe_dump_rtl_for_gimple_stmt (stmt, last);
3769
    }
3770
  else
3771
    note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
3772
 
3773
  NOTE_BASIC_BLOCK (note) = bb;
3774
 
3775
  for (; !gsi_end_p (gsi); gsi_next (&gsi))
3776
    {
3777
      basic_block new_bb;
3778
 
3779
      stmt = gsi_stmt (gsi);
3780
 
3781
      /* If this statement is a non-debug one, and we generate debug
         insns, then this one might be the last real use of a TERed
         SSA_NAME, while there are still some debug uses of it further
         down.  Expanding the current SSA name in such further debug
         uses by their RHS might lead to wrong debug info, as coalescing
         might make the operands of such an RHS be placed into the same
         pseudo as something else.  Like so:
           a_1 = a_0 + 1;   // Assume a_1 is TERed and a_0 is dead
           use(a_1);
           a_2 = ...
           #DEBUG ... => a_1
         As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
         If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
         the write to a_2 would actually have clobbered the place which
         formerly held a_0.

         So, instead of that, we recognize the situation, and generate
         debug temporaries at the last real use of TERed SSA names:
           a_1 = a_0 + 1;
           #DEBUG #D1 => a_1
           use(a_1);
           a_2 = ...
           #DEBUG ... => #D1
         */
3805
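      /* Illustrative sketch of the hazard described above, with a_0 and a_2
         mapped onto one shared slot the way coalescing would map them (all
         names are illustrative only, not part of this pass):  */
#if 0
extern void sketch_use (int);
extern int sketch_compute (void);

static int
ter_clobber_sketch (int a_0)
{
  int slot = a_0;            /* a_0 lives in "slot".                  */
  int a_1 = slot + 1;        /* a_1 = a_0 + 1, the TERed definition.  */
  sketch_use (a_1);          /* last real use of a_1.                 */
  slot = sketch_compute ();  /* a_2 reuses "slot"; a_0 is clobbered.  */
  /* A debug bind that re-evaluated "a_0 + 1" at this point would read
     the clobbered slot, which is why a debug temporary is emitted at
     the last real use instead.  */
  return slot;
}
#endif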
      if (MAY_HAVE_DEBUG_INSNS
3806
          && SA.values
3807
          && !is_gimple_debug (stmt))
3808
        {
3809
          ssa_op_iter iter;
3810
          tree op;
3811
          gimple def;
3812
 
3813
          location_t sloc = get_curr_insn_source_location ();
3814
          tree sblock = get_curr_insn_block ();
3815
 
3816
          /* Look for SSA names that have their last use here (TERed
3817
             names always have only one real use).  */
3818
          FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3819
            if ((def = get_gimple_for_ssa_name (op)))
3820
              {
3821
                imm_use_iterator imm_iter;
3822
                use_operand_p use_p;
3823
                bool have_debug_uses = false;
3824
 
3825
                FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
3826
                  {
3827
                    if (gimple_debug_bind_p (USE_STMT (use_p)))
3828
                      {
3829
                        have_debug_uses = true;
3830
                        break;
3831
                      }
3832
                  }
3833
 
3834
                if (have_debug_uses)
3835
                  {
3836
                    /* OP is a TERed SSA name, with DEF its defining
                       statement, and where OP is used in further debug
                       instructions.  Generate a debug temporary, and
                       replace all uses of OP in debug insns with that
                       temporary.  */
3841
                    gimple debugstmt;
3842
                    tree value = gimple_assign_rhs_to_tree (def);
3843
                    tree vexpr = make_node (DEBUG_EXPR_DECL);
3844
                    rtx val;
3845
                    enum machine_mode mode;
3846
 
3847
                    set_curr_insn_source_location (gimple_location (def));
3848
                    set_curr_insn_block (gimple_block (def));
3849
 
3850
                    DECL_ARTIFICIAL (vexpr) = 1;
3851
                    TREE_TYPE (vexpr) = TREE_TYPE (value);
3852
                    if (DECL_P (value))
3853
                      mode = DECL_MODE (value);
3854
                    else
3855
                      mode = TYPE_MODE (TREE_TYPE (value));
3856
                    DECL_MODE (vexpr) = mode;
3857
 
3858
                    val = gen_rtx_VAR_LOCATION
3859
                        (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3860
 
3861
                    emit_debug_insn (val);
3862
 
3863
                    FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
3864
                      {
3865
                        if (!gimple_debug_bind_p (debugstmt))
3866
                          continue;
3867
 
3868
                        FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
3869
                          SET_USE (use_p, vexpr);
3870
 
3871
                        update_stmt (debugstmt);
3872
                      }
3873
                  }
3874
              }
3875
          set_curr_insn_source_location (sloc);
3876
          set_curr_insn_block (sblock);
3877
        }
3878
 
3879
      currently_expanding_gimple_stmt = stmt;
3880
 
3881
      /* Expand this statement, then evaluate the resulting RTL and
3882
         fixup the CFG accordingly.  */
3883
      if (gimple_code (stmt) == GIMPLE_COND)
3884
        {
3885
          new_bb = expand_gimple_cond (bb, stmt);
3886
          if (new_bb)
3887
            return new_bb;
3888
        }
3889
      else if (gimple_debug_bind_p (stmt))
3890
        {
3891
          location_t sloc = get_curr_insn_source_location ();
3892
          tree sblock = get_curr_insn_block ();
3893
          gimple_stmt_iterator nsi = gsi;
3894
 
3895
          for (;;)
3896
            {
3897
              tree var = gimple_debug_bind_get_var (stmt);
3898
              tree value;
3899
              rtx val;
3900
              enum machine_mode mode;
3901
 
3902
              if (TREE_CODE (var) != DEBUG_EXPR_DECL
3903
                  && TREE_CODE (var) != LABEL_DECL
3904
                  && !target_for_debug_bind (var))
3905
                goto delink_debug_stmt;
3906
 
3907
              if (gimple_debug_bind_has_value_p (stmt))
3908
                value = gimple_debug_bind_get_value (stmt);
3909
              else
3910
                value = NULL_TREE;
3911
 
3912
              last = get_last_insn ();
3913
 
3914
              set_curr_insn_source_location (gimple_location (stmt));
3915
              set_curr_insn_block (gimple_block (stmt));
3916
 
3917
              if (DECL_P (var))
3918
                mode = DECL_MODE (var);
3919
              else
3920
                mode = TYPE_MODE (TREE_TYPE (var));
3921
 
3922
              val = gen_rtx_VAR_LOCATION
3923
                (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3924
 
3925
              emit_debug_insn (val);
3926
 
3927
              if (dump_file && (dump_flags & TDF_DETAILS))
3928
                {
3929
                  /* We can't dump the insn with a TREE where an RTX
3930
                     is expected.  */
3931
                  PAT_VAR_LOCATION_LOC (val) = const0_rtx;
3932
                  maybe_dump_rtl_for_gimple_stmt (stmt, last);
3933
                  PAT_VAR_LOCATION_LOC (val) = (rtx)value;
3934
                }
3935
 
3936
            delink_debug_stmt:
3937
              /* In order not to generate too many debug temporaries,
                 we delink all uses of debug statements we already expanded.
                 Therefore, debug statements between the definition and the
                 real use of TERed SSA names will continue to use the SSA
                 name, and not be replaced with debug temps.  */
3942
              delink_stmt_imm_use (stmt);
3943
 
3944
              gsi = nsi;
3945
              gsi_next (&nsi);
3946
              if (gsi_end_p (nsi))
3947
                break;
3948
              stmt = gsi_stmt (nsi);
3949
              if (!gimple_debug_bind_p (stmt))
3950
                break;
3951
            }
3952
 
3953
          set_curr_insn_source_location (sloc);
3954
          set_curr_insn_block (sblock);
3955
        }
3956
      else if (gimple_debug_source_bind_p (stmt))
3957
        {
3958
          location_t sloc = get_curr_insn_source_location ();
3959
          tree sblock = get_curr_insn_block ();
3960
          tree var = gimple_debug_source_bind_get_var (stmt);
3961
          tree value = gimple_debug_source_bind_get_value (stmt);
3962
          rtx val;
3963
          enum machine_mode mode;
3964
 
3965
          last = get_last_insn ();
3966
 
3967
          set_curr_insn_source_location (gimple_location (stmt));
3968
          set_curr_insn_block (gimple_block (stmt));
3969
 
3970
          mode = DECL_MODE (var);
3971
 
3972
          val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
3973
                                      VAR_INIT_STATUS_UNINITIALIZED);
3974
 
3975
          emit_debug_insn (val);
3976
 
3977
          if (dump_file && (dump_flags & TDF_DETAILS))
3978
            {
3979
              /* We can't dump the insn with a TREE where an RTX
3980
                 is expected.  */
3981
              PAT_VAR_LOCATION_LOC (val) = const0_rtx;
3982
              maybe_dump_rtl_for_gimple_stmt (stmt, last);
3983
              PAT_VAR_LOCATION_LOC (val) = (rtx)value;
3984
            }
3985
 
3986
          set_curr_insn_source_location (sloc);
3987
          set_curr_insn_block (sblock);
3988
        }
3989
      else
3990
        {
3991
          if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
3992
            {
3993
              bool can_fallthru;
3994
              new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
3995
              if (new_bb)
3996
                {
3997
                  if (can_fallthru)
3998
                    bb = new_bb;
3999
                  else
4000
                    return new_bb;
4001
                }
4002
            }
4003
          else
4004
            {
4005
              def_operand_p def_p;
4006
              def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
4007
 
4008
              if (def_p != NULL)
4009
                {
4010
                  /* Ignore this stmt if it is in the list of
4011
                     replaceable expressions.  */
4012
                  if (SA.values
4013
                      && bitmap_bit_p (SA.values,
4014
                                       SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
4015
                    continue;
4016
                }
4017
              last = expand_gimple_stmt (stmt);
4018
              maybe_dump_rtl_for_gimple_stmt (stmt, last);
4019
            }
4020
        }
4021
    }
4022
 
4023
  currently_expanding_gimple_stmt = NULL;
4024
 
4025
  /* Expand implicit goto and convert goto_locus.  */
4026
  FOR_EACH_EDGE (e, ei, bb->succs)
4027
    {
4028
      if (e->goto_locus && e->goto_block)
4029
        {
4030
          set_curr_insn_source_location (e->goto_locus);
4031
          set_curr_insn_block (e->goto_block);
4032
          e->goto_locus = curr_insn_locator ();
4033
        }
4034
      e->goto_block = NULL;
4035
      if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
4036
        {
4037
          emit_jump (label_rtx_for_bb (e->dest));
4038
          e->flags &= ~EDGE_FALLTHRU;
4039
        }
4040
    }
4041
 
4042
  /* Expanded RTL can create a jump in the last instruction of the block.
     This might later be assumed to be a jump to the successor and break
     edge insertion.  We need to insert a dummy move to prevent this.
     PR41440.  */
4045
  if (single_succ_p (bb)
4046
      && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
4047
      && (last = get_last_insn ())
4048
      && JUMP_P (last))
4049
    {
4050
      rtx dummy = gen_reg_rtx (SImode);
4051
      emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
4052
    }
4053
 
4054
  do_pending_stack_adjust ();
4055
 
4056
  /* Find the block tail.  The last insn in the block is the insn
4057
     before a barrier and/or table jump insn.  */
4058
  last = get_last_insn ();
4059
  if (BARRIER_P (last))
4060
    last = PREV_INSN (last);
4061
  if (JUMP_TABLE_DATA_P (last))
4062
    last = PREV_INSN (PREV_INSN (last));
4063
  BB_END (bb) = last;
4064
 
4065
  update_bb_for_insn (bb);
4066
 
4067
  return bb;
4068
}
4069
 
4070
 
4071
/* Create a basic block for initialization code.  */
4072
 
4073
static basic_block
4074
construct_init_block (void)
4075
{
4076
  basic_block init_block, first_block;
4077
  edge e = NULL;
4078
  int flags;
4079
 
4080
  /* Multiple entry points not supported yet.  */
4081
  gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
4082
  init_rtl_bb_info (ENTRY_BLOCK_PTR);
4083
  init_rtl_bb_info (EXIT_BLOCK_PTR);
4084
  ENTRY_BLOCK_PTR->flags |= BB_RTL;
4085
  EXIT_BLOCK_PTR->flags |= BB_RTL;
4086
 
4087
  e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);
4088
 
4089
  /* When the entry edge points to the first basic block, we don't need
     a jump; otherwise we have to jump to the proper target.  */
4091
  if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
4092
    {
4093
      tree label = gimple_block_label (e->dest);
4094
 
4095
      emit_jump (label_rtx (label));
4096
      flags = 0;
4097
    }
4098
  else
4099
    flags = EDGE_FALLTHRU;
4100
 
4101
  init_block = create_basic_block (NEXT_INSN (get_insns ()),
4102
                                   get_last_insn (),
4103
                                   ENTRY_BLOCK_PTR);
4104
  init_block->frequency = ENTRY_BLOCK_PTR->frequency;
4105
  init_block->count = ENTRY_BLOCK_PTR->count;
4106
  if (e)
4107
    {
4108
      first_block = e->dest;
4109
      redirect_edge_succ (e, init_block);
4110
      e = make_edge (init_block, first_block, flags);
4111
    }
4112
  else
4113
    e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
4114
  e->probability = REG_BR_PROB_BASE;
4115
  e->count = ENTRY_BLOCK_PTR->count;
4116
 
4117
  update_bb_for_insn (init_block);
4118
  return init_block;
4119
}
4120
 
4121
/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
4122
   found in the block tree.  */
4123
 
4124
static void
4125
set_block_levels (tree block, int level)
4126
{
4127
  while (block)
4128
    {
4129
      BLOCK_NUMBER (block) = level;
4130
      set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
4131
      block = BLOCK_CHAIN (block);
4132
    }
4133
}
4134
 
4135
/* Create a block containing landing pads and similar stuff.  */
4136
 
4137
static void
4138
construct_exit_block (void)
4139
{
4140
  rtx head = get_last_insn ();
4141
  rtx end;
4142
  basic_block exit_block;
4143
  edge e, e2;
4144
  unsigned ix;
4145
  edge_iterator ei;
4146
  rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);
4147
 
4148
  rtl_profile_for_bb (EXIT_BLOCK_PTR);
4149
 
4150
  /* Make sure the locus is set to the end of the function, so that
4151
     epilogue line numbers and warnings are set properly.  */
4152
  if (cfun->function_end_locus != UNKNOWN_LOCATION)
4153
    input_location = cfun->function_end_locus;
4154
 
4155
  /* The following insns belong to the top scope.  */
4156
  set_curr_insn_block (DECL_INITIAL (current_function_decl));
4157
 
4158
  /* Generate rtl for function exit.  */
4159
  expand_function_end ();
4160
 
4161
  end = get_last_insn ();
4162
  if (head == end)
4163
    return;
4164
  /* While emitting the function end we could have moved the end of the
     last basic block.  */
4166
  BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
4167
  while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
4168
    head = NEXT_INSN (head);
4169
  exit_block = create_basic_block (NEXT_INSN (head), end,
4170
                                   EXIT_BLOCK_PTR->prev_bb);
4171
  exit_block->frequency = EXIT_BLOCK_PTR->frequency;
4172
  exit_block->count = EXIT_BLOCK_PTR->count;
4173
 
4174
  ix = 0;
4175
  while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
4176
    {
4177
      e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
4178
      if (!(e->flags & EDGE_ABNORMAL))
4179
        redirect_edge_succ (e, exit_block);
4180
      else
4181
        ix++;
4182
    }
4183
 
4184
  e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
4185
  e->probability = REG_BR_PROB_BASE;
4186
  e->count = EXIT_BLOCK_PTR->count;
4187
  FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
4188
    if (e2 != e)
4189
      {
4190
        e->count -= e2->count;
4191
        exit_block->count -= e2->count;
4192
        exit_block->frequency -= EDGE_FREQUENCY (e2);
4193
      }
4194
  if (e->count < 0)
4195
    e->count = 0;
4196
  if (exit_block->count < 0)
4197
    exit_block->count = 0;
4198
  if (exit_block->frequency < 0)
4199
    exit_block->frequency = 0;
4200
  update_bb_for_insn (exit_block);
4201
}
4202
 
4203
/* Helper function for discover_nonconstant_array_refs.
4204
   Look for ARRAY_REF nodes with non-constant indexes and mark them
4205
   addressable.  */
4206
 
4207
static tree
4208
discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
4209
                                   void *data ATTRIBUTE_UNUSED)
4210
{
4211
  tree t = *tp;
4212
 
4213
  if (IS_TYPE_OR_DECL_P (t))
4214
    *walk_subtrees = 0;
4215
  else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4216
    {
4217
      while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4218
              && is_gimple_min_invariant (TREE_OPERAND (t, 1))
4219
              && (!TREE_OPERAND (t, 2)
4220
                  || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
4221
             || (TREE_CODE (t) == COMPONENT_REF
4222
                 && (!TREE_OPERAND (t,2)
4223
                     || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
4224
             || TREE_CODE (t) == BIT_FIELD_REF
4225
             || TREE_CODE (t) == REALPART_EXPR
4226
             || TREE_CODE (t) == IMAGPART_EXPR
4227
             || TREE_CODE (t) == VIEW_CONVERT_EXPR
4228
             || CONVERT_EXPR_P (t))
4229
        t = TREE_OPERAND (t, 0);
4230
 
4231
      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4232
        {
4233
          t = get_base_address (t);
4234
          if (t && DECL_P (t)
4235
              && DECL_MODE (t) != BLKmode)
4236
            TREE_ADDRESSABLE (t) = 1;
4237
        }
4238
 
4239
      *walk_subtrees = 0;
4240
    }
4241
 
4242
  return NULL_TREE;
4243
}
4244
 
4245
/* RTL expansion is not able to compile array references with variable
   offsets for arrays stored in a single register.  Discover such
   expressions and mark the variables as addressable to avoid this
   scenario.  */
4249
 
4250
static void
4251
discover_nonconstant_array_refs (void)
4252
{
4253
  basic_block bb;
4254
  gimple_stmt_iterator gsi;
4255
 
4256
  FOR_EACH_BB (bb)
4257
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4258
      {
4259
        gimple stmt = gsi_stmt (gsi);
4260
        if (!is_gimple_debug (stmt))
4261
          walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
4262
      }
4263
}
4264
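/* Illustrative sketch (not part of this pass): the kind of construct the
   walk above is after.  An aggregate whose mode fits a single register
   (for example, an 8-byte struct on a typical 64-bit target) could
   otherwise be expanded as a register; an ARRAY_REF into it with a
   non-constant index forces it to be marked TREE_ADDRESSABLE so that it
   gets a stack slot instead.  Types and names below are illustrative.  */
#if 0
struct sketch_pair_ints { int e[2]; };

static int
pick_element_sketch (struct sketch_pair_ints v, int i)
{
  return v.e[i];   /* ARRAY_REF with a non-constant index */
}
#endif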
 
4265
/* This function sets crtl->args.internal_arg_pointer to a virtual
   register if DRAP is needed.  The local register allocator will replace
   virtual_incoming_args_rtx with the virtual register.  */
4268
 
4269
static void
4270
expand_stack_alignment (void)
4271
{
4272
  rtx drap_rtx;
4273
  unsigned int preferred_stack_boundary;
4274
 
4275
  if (! SUPPORTS_STACK_ALIGNMENT)
4276
    return;
4277
 
4278
  if (cfun->calls_alloca
4279
      || cfun->has_nonlocal_label
4280
      || crtl->has_nonlocal_goto)
4281
    crtl->need_drap = true;
4282
 
4283
  /* Call update_stack_boundary here again to update incoming stack
4284
     boundary.  It may set incoming stack alignment to a different
4285
     value after RTL expansion.  TARGET_FUNCTION_OK_FOR_SIBCALL may
4286
     use the minimum incoming stack alignment to check if it is OK
4287
     to perform sibcall optimization since sibcall optimization will
4288
     only align the outgoing stack to incoming stack boundary.  */
4289
  if (targetm.calls.update_stack_boundary)
4290
    targetm.calls.update_stack_boundary ();
4291
 
4292
  /* The incoming stack frame has to be aligned at least at
4293
     parm_stack_boundary.  */
4294
  gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
4295
 
4296
  /* Update crtl->stack_alignment_estimated and use it later to align
4297
     stack.  We check PREFERRED_STACK_BOUNDARY if there may be non-call
4298
     exceptions since callgraph doesn't collect incoming stack alignment
4299
     in this case.  */
4300
  if (cfun->can_throw_non_call_exceptions
4301
      && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
4302
    preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
4303
  else
4304
    preferred_stack_boundary = crtl->preferred_stack_boundary;
4305
  if (preferred_stack_boundary > crtl->stack_alignment_estimated)
4306
    crtl->stack_alignment_estimated = preferred_stack_boundary;
4307
  if (preferred_stack_boundary > crtl->stack_alignment_needed)
4308
    crtl->stack_alignment_needed = preferred_stack_boundary;
4309
 
4310
  gcc_assert (crtl->stack_alignment_needed
4311
              <= crtl->stack_alignment_estimated);
4312
 
4313
  crtl->stack_realign_needed
4314
    = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
4315
  crtl->stack_realign_tried = crtl->stack_realign_needed;
4316
 
4317
  crtl->stack_realign_processed = true;
4318
 
4319
  /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
4320
     alignment.  */
4321
  gcc_assert (targetm.calls.get_drap_rtx != NULL);
4322
  drap_rtx = targetm.calls.get_drap_rtx ();
4323
 
4324
  /* stack_realign_drap and drap_rtx must match.  */
4325
  gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
4326
 
4327
  /* Do nothing if NULL is returned, which means DRAP is not needed.  */
4328
  if (NULL != drap_rtx)
4329
    {
4330
      crtl->args.internal_arg_pointer = drap_rtx;
4331
 
4332
      /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
4333
         needed. */
4334
      fixup_tail_calls ();
4335
    }
4336
}
4337
 
4338
/* Translate the intermediate representation contained in the CFG
4339
   from GIMPLE trees to RTL.
4340
 
4341
   We do conversion per basic block and preserve/update the tree CFG.
4342
   This implies we have to do some magic as the CFG can simultaneously
4343
   consist of basic blocks containing RTL and GIMPLE trees.  This can
4344
   confuse the CFG hooks, so be careful to not manipulate CFG during
4345
   the expansion.  */
4346
 
4347
static unsigned int
4348
gimple_expand_cfg (void)
4349
{
4350
  basic_block bb, init_block;
4351
  sbitmap blocks;
4352
  edge_iterator ei;
4353
  edge e;
4354
  rtx var_seq;
4355
  unsigned i;
4356
 
4357
  timevar_push (TV_OUT_OF_SSA);
4358
  rewrite_out_of_ssa (&SA);
4359
  timevar_pop (TV_OUT_OF_SSA);
4360
  SA.partition_to_pseudo = (rtx *)xcalloc (SA.map->num_partitions,
4361
                                           sizeof (rtx));
4362
 
4363
  /* Some backends want to know that we are expanding to RTL.  */
4364
  currently_expanding_to_rtl = 1;
4365
 
4366
  rtl_profile_for_bb (ENTRY_BLOCK_PTR);
4367
 
4368
  insn_locators_alloc ();
4369
  if (!DECL_IS_BUILTIN (current_function_decl))
4370
    {
4371
      /* Eventually, all FEs should explicitly set function_start_locus.  */
4372
      if (cfun->function_start_locus == UNKNOWN_LOCATION)
4373
       set_curr_insn_source_location
4374
         (DECL_SOURCE_LOCATION (current_function_decl));
4375
      else
4376
       set_curr_insn_source_location (cfun->function_start_locus);
4377
    }
4378
  else
4379
    set_curr_insn_source_location (UNKNOWN_LOCATION);
4380
  set_curr_insn_block (DECL_INITIAL (current_function_decl));
4381
  prologue_locator = curr_insn_locator ();
4382
 
4383
#ifdef INSN_SCHEDULING
4384
  init_sched_attrs ();
4385
#endif
4386
 
4387
  /* Make sure the first insn is a note, even if we don't want line numbers.
     This makes sure the first insn will never be deleted.
     Also, final expects a note to appear there.  */
4390
  emit_note (NOTE_INSN_DELETED);
4391
 
4392
  /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE.  */
4393
  discover_nonconstant_array_refs ();
4394
 
4395
  targetm.expand_to_rtl_hook ();
4396
  crtl->stack_alignment_needed = STACK_BOUNDARY;
4397
  crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
4398
  crtl->stack_alignment_estimated = 0;
4399
  crtl->preferred_stack_boundary = STACK_BOUNDARY;
4400
  cfun->cfg->max_jumptable_ents = 0;
4401
 
4402
  /* Resolve the function section.  Some targets, like ARM EABI, rely on
     knowledge of the function section at expansion time to predict the
     distance of calls.  */
4404
  resolve_unique_section (current_function_decl, 0, flag_function_sections);
4405
 
4406
  /* Expand the variables recorded during gimple lowering.  */
4407
  timevar_push (TV_VAR_EXPAND);
4408
  start_sequence ();
4409
 
4410
  expand_used_vars ();
4411
 
4412
  var_seq = get_insns ();
4413
  end_sequence ();
4414
  timevar_pop (TV_VAR_EXPAND);
4415
 
4416
  /* Honor stack protection warnings.  */
4417
  if (warn_stack_protect)
4418
    {
4419
      if (cfun->calls_alloca)
4420
        warning (OPT_Wstack_protector,
4421
                 "stack protector not protecting local variables: "
4422
                 "variable length buffer");
4423
      if (has_short_buffer && !crtl->stack_protect_guard)
4424
        warning (OPT_Wstack_protector,
4425
                 "stack protector not protecting function: "
4426
                 "all local arrays are less than %d bytes long",
4427
                 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
4428
    }
4429
 
4430
  /* Set up parameters and prepare for return, for the function.  */
4431
  expand_function_start (current_function_decl);
4432
 
4433
  /* If we emitted any instructions for setting up the variables,
4434
     emit them before the FUNCTION_START note.  */
4435
  if (var_seq)
4436
    {
4437
      emit_insn_before (var_seq, parm_birth_insn);
4438
 
4439
      /* In expand_function_end we'll insert the alloca save/restore
         before parm_birth_insn.  We've just inserted an alloca call.
         Adjust the pointer to match.  */
4442
      parm_birth_insn = var_seq;
4443
    }
4444
 
4445
  /* Now that we also have the parameter RTXs, copy them over to our
4446
     partitions.  */
4447
  for (i = 0; i < SA.map->num_partitions; i++)
4448
    {
4449
      tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));
4450
 
4451
      if (TREE_CODE (var) != VAR_DECL
4452
          && !SA.partition_to_pseudo[i])
4453
        SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
4454
      gcc_assert (SA.partition_to_pseudo[i]);
4455
 
4456
      /* If this decl was marked as living in multiple places, reset
4457
         this now to NULL.  */
4458
      if (DECL_RTL_IF_SET (var) == pc_rtx)
4459
        SET_DECL_RTL (var, NULL);
4460
 
4461
      /* Some RTL parts really want to look at DECL_RTL(x) when x
         was a decl marked in REG_ATTR or MEM_ATTR.  We could use
         SET_DECL_RTL here to make this available, but that would mean
         selecting one of the potentially many RTLs for one DECL.  Instead
         of doing that we simply reset the MEM_EXPR of the RTL in question,
         so nobody can get at it and hence nobody can call DECL_RTL on it.  */
4467
      if (!DECL_RTL_SET_P (var))
4468
        {
4469
          if (MEM_P (SA.partition_to_pseudo[i]))
4470
            set_mem_expr (SA.partition_to_pseudo[i], NULL);
4471
        }
4472
    }
4473
 
4474
  /* If we have a class containing differently aligned pointers
4475
     we need to merge those into the corresponding RTL pointer
4476
     alignment.  */
4477
  for (i = 1; i < num_ssa_names; i++)
4478
    {
4479
      tree name = ssa_name (i);
4480
      int part;
4481
      rtx r;
4482
 
4483
      if (!name
4484
          || !POINTER_TYPE_P (TREE_TYPE (name))
4485
          /* We might have generated new SSA names in
             update_alias_info_with_stack_vars.  They will have NULL
             defining statements, and won't be part of the partitioning,
             so ignore those.  */
4489
          || !SSA_NAME_DEF_STMT (name))
4490
        continue;
4491
      part = var_to_partition (SA.map, name);
4492
      if (part == NO_PARTITION)
4493
        continue;
4494
      r = SA.partition_to_pseudo[part];
4495
      if (REG_P (r))
4496
        mark_reg_pointer (r, get_pointer_alignment (name));
4497
    }
4498
 
4499
  /* If this function is `main', emit a call to `__main'
4500
     to run global initializers, etc.  */
4501
  if (DECL_NAME (current_function_decl)
4502
      && MAIN_NAME_P (DECL_NAME (current_function_decl))
4503
      && DECL_FILE_SCOPE_P (current_function_decl))
4504
    expand_main_function ();
4505
 
4506
  /* Initialize the stack_protect_guard field.  This must happen after the
4507
     call to __main (if any) so that the external decl is initialized.  */
4508
  if (crtl->stack_protect_guard)
4509
    stack_protect_prologue ();
4510
 
4511
  expand_phi_nodes (&SA);
4512
 
4513
  /* Register rtl specific functions for cfg.  */
4514
  rtl_register_cfg_hooks ();
4515
 
4516
  init_block = construct_init_block ();
4517
 
4518
  /* Clear EDGE_EXECUTABLE on the entry edge(s).  It is cleaned from the
4519
     remaining edges later.  */
4520
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
4521
    e->flags &= ~EDGE_EXECUTABLE;
  lab_rtx_for_bb = pointer_map_create ();
  FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
    bb = expand_gimple_basic_block (bb);

  if (MAY_HAVE_DEBUG_INSNS)
    expand_debug_locations ();

  execute_free_datastructures ();
  timevar_push (TV_OUT_OF_SSA);
  finish_out_of_ssa (&SA);
  timevar_pop (TV_OUT_OF_SSA);

  timevar_push (TV_POST_EXPAND);
  /* We are no longer in SSA form.  */
  cfun->gimple_df->in_ssa_p = false;

  /* Expansion is used by optimization passes too, set maybe_hot_insn_p
     conservatively to true until they are all profile aware.  */
  pointer_map_destroy (lab_rtx_for_bb);
  free_histograms ();

  construct_exit_block ();
  set_curr_insn_block (DECL_INITIAL (current_function_decl));
  insn_locators_finalize ();

  /* Zap the tree EH table.  */
  set_eh_throw_stmt_table (cfun, NULL);

  /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
     to split edges, which the edge insertions below might do.  */
  rebuild_jump_labels (get_insns ());
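
  /* Commit the insn sequences that were queued on edges above (the PHI
     copies in particular).  A sequence queued on the single successor
     edge of the entry block is emitted after parm_birth_insn rather
     than by splitting that edge.  */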
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    {
      edge e;
      edge_iterator ei;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
        {
          if (e->insns.r)
            {
              rebuild_jump_labels_chain (e->insns.r);
              /* Avoid putting insns before parm_birth_insn.  */
              if (e->src == ENTRY_BLOCK_PTR
                  && single_succ_p (ENTRY_BLOCK_PTR)
                  && parm_birth_insn)
                {
                  rtx insns = e->insns.r;
                  e->insns.r = NULL_RTX;
                  emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
                }
              else
                commit_one_edge_insertion (e);
            }
          else
            ei_next (&ei);
        }
    }

  /* We're done expanding trees to RTL.  */
  currently_expanding_to_rtl = 0;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
    {
      edge e;
      edge_iterator ei;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
        {
          /* Clear EDGE_EXECUTABLE.  This flag is never used in the backend.  */
          e->flags &= ~EDGE_EXECUTABLE;

          /* At the moment not all abnormal edges match the RTL
             representation.  It is safe to remove them here as
             find_many_sub_basic_blocks will rediscover them.
             In the future we should get this fixed properly.  */
          if ((e->flags & EDGE_ABNORMAL)
              && !(e->flags & EDGE_SIBCALL))
            remove_edge (e);
          else
            ei_next (&ei);
        }
    }
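
  /* Expanding a single statement may have created RTL-level control
     flow that the CFG does not yet describe.  Rescan all blocks,
     splitting them into sub-basic-blocks where needed, and then remove
     any edges that no longer match the generated insns.  */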
  blocks = sbitmap_alloc (last_basic_block);
  sbitmap_ones (blocks);
  find_many_sub_basic_blocks (blocks);
  sbitmap_free (blocks);
  purge_all_dead_edges ();

  compact_blocks ();
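
  /* Finalize the stack alignment this function needs now that all stack
     uses are known; expand_stack_alignment (defined earlier in this
     file) also sets up dynamic stack realignment, and a DRAP register
     when the target requires one.  */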
  expand_stack_alignment ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  /* There's no need to defer outputting this function any more; we
     know we want to output it.  */
  DECL_DEFER_OUTPUT (current_function_decl) = 0;

  /* Now that we're done expanding trees to RTL, we shouldn't have any
     more CONCATs anywhere.  */
  generating_concat_p = 0;

  if (dump_file)
    {
      fprintf (dump_file,
               "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
      /* And the pass manager will dump RTL for us.  */
    }

  /* If we're emitting a nested function, make sure its parent gets
     emitted as well.  Doing otherwise confuses debug info.  */
  {
    tree parent;
    for (parent = DECL_CONTEXT (current_function_decl);
         parent != NULL_TREE;
         parent = get_containing_scope (parent))
      if (TREE_CODE (parent) == FUNCTION_DECL)
        TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
  }

  /* We are now committed to emitting code for this function.  Do any
     preparation, such as emitting abstract debug info for the inline
     function before its body gets mangled by optimization.  */
  if (cgraph_function_possibly_inlined_p (current_function_decl))
    (*debug_hooks->outlining_inline_function) (current_function_decl);

  TREE_ASM_WRITTEN (current_function_decl) = 1;

  /* After expanding, the return labels are no longer needed.  */
  return_label = NULL;
  naked_return_label = NULL;

  /* After expanding, the tm_restart map is no longer needed.  */
  if (cfun->gimple_df->tm_restart)
    {
      htab_delete (cfun->gimple_df->tm_restart);
      cfun->gimple_df->tm_restart = NULL;
    }

  /* Tag the blocks with a depth number so that change_scope can find
     the common parent easily.  */
  set_block_levels (DECL_INITIAL (cfun->decl), 0);
  default_rtl_profile ();
  timevar_pop (TV_POST_EXPAND);
  return 0;
}

struct rtl_opt_pass pass_expand =
{
 {
  RTL_PASS,
  "expand",                             /* name */
  NULL,                                 /* gate */
  gimple_expand_cfg,                    /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_EXPAND,                            /* tv_id */
  PROP_ssa | PROP_gimple_leh | PROP_cfg
    | PROP_gimple_lcx,                  /* properties_required */
  PROP_rtl,                             /* properties_provided */
  PROP_ssa | PROP_trees,                /* properties_destroyed */
  TODO_verify_ssa | TODO_verify_flow
    | TODO_verify_stmts,                /* todo_flags_start */
  TODO_ggc_collect                      /* todo_flags_finish */
 }
};
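
/* pass_expand marks the boundary between the GIMPLE and RTL pipelines:
   it requires SSA form, a CFG, lowered EH and lowered complex arithmetic
   (PROP_gimple_lcx), provides PROP_rtl, and destroys the tree/SSA
   properties.  It is scheduled from passes.c; in this GCC version that
   is, roughly (an illustrative sketch, not a copy of that file):

     NEXT_PASS (pass_expand);

   inside init_optimization_passes, after which the RTL passes run.  */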
