OpenCores
URL https://opencores.org/ocsvn/openrisc/openrisc/trunk

Subversion Repositories openrisc

[/] [openrisc/] [trunk/] [gnu-dev/] [or1k-gcc/] [gcc/] [gimplify.c] - Blame information for rev 732

Go to most recent revision | Details | Compare with Previous | View Log

Line No. Rev Author Line
1 684 jeremybenn
/* Tree lowering pass.  This pass converts the GENERIC functions-as-trees
2
   tree representation into the GIMPLE form.
3
   Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
4
   2012 Free Software Foundation, Inc.
5
   Major work done by Sebastian Pop <s.pop@laposte.net>,
6
   Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
7
 
8
This file is part of GCC.
9
 
10
GCC is free software; you can redistribute it and/or modify it under
11
the terms of the GNU General Public License as published by the Free
12
Software Foundation; either version 3, or (at your option) any later
13
version.
14
 
15
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16
WARRANTY; without even the implied warranty of MERCHANTABILITY or
17
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
18
for more details.
19
 
20
You should have received a copy of the GNU General Public License
21
along with GCC; see the file COPYING3.  If not see
22
<http://www.gnu.org/licenses/>.  */
23
 
24
#include "config.h"
25
#include "system.h"
26
#include "coretypes.h"
27
#include "tm.h"
28
#include "tree.h"
29
#include "gimple.h"
30
#include "tree-iterator.h"
31
#include "tree-inline.h"
32
#include "tree-pretty-print.h"
33
#include "langhooks.h"
34
#include "tree-flow.h"
35
#include "cgraph.h"
36
#include "timevar.h"
37
#include "hashtab.h"
38
#include "flags.h"
39
#include "function.h"
40
#include "output.h"
41
#include "ggc.h"
42
#include "diagnostic-core.h"
43
#include "target.h"
44
#include "pointer-set.h"
45
#include "splay-tree.h"
46
#include "vec.h"
47
#include "gimple.h"
48
#include "tree-pass.h"
49
 
50
#include "langhooks-def.h"      /* FIXME: for lhd_set_decl_assembler_name.  */
51
#include "expr.h"               /* FIXME: for can_move_by_pieces
52
                                   and STACK_CHECK_MAX_VAR_SIZE.  */
53
 
54
enum gimplify_omp_var_data
55
{
56
  GOVD_SEEN = 1,
57
  GOVD_EXPLICIT = 2,
58
  GOVD_SHARED = 4,
59
  GOVD_PRIVATE = 8,
60
  GOVD_FIRSTPRIVATE = 16,
61
  GOVD_LASTPRIVATE = 32,
62
  GOVD_REDUCTION = 64,
63
  GOVD_LOCAL = 128,
64
  GOVD_DEBUG_PRIVATE = 256,
65
  GOVD_PRIVATE_OUTER_REF = 512,
66
  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
67
                           | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LOCAL)
68
};
69
 
70
 
71
enum omp_region_type
72
{
73
  ORT_WORKSHARE = 0,
74
  ORT_PARALLEL = 2,
75
  ORT_COMBINED_PARALLEL = 3,
76
  ORT_TASK = 4,
77
  ORT_UNTIED_TASK = 5
78
};
79
 
80
struct gimplify_omp_ctx
81
{
82
  struct gimplify_omp_ctx *outer_context;
83
  splay_tree variables;
84
  struct pointer_set_t *privatized_types;
85
  location_t location;
86
  enum omp_clause_default_kind default_kind;
87
  enum omp_region_type region_type;
88
};
89
 
90
static struct gimplify_ctx *gimplify_ctxp;
91
static struct gimplify_omp_ctx *gimplify_omp_ctxp;
92
 
93
 
94
/* Formal (expression) temporary table handling: multiple occurrences of
95
   the same scalar expression are evaluated into the same temporary.  */
96
 
97
typedef struct gimple_temp_hash_elt
98
{
99
  tree val;   /* Key */
100
  tree temp;  /* Value */
101
} elt_t;
102
 
103
/* Forward declaration.  */
104
static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
105
 
106
/* Mark X addressable.  Unlike the langhook we expect X to be in gimple
107
   form and we don't do any syntax checking.  */
108
 
109
void
110
mark_addressable (tree x)
111
{
112
  while (handled_component_p (x))
113
    x = TREE_OPERAND (x, 0);
114
  if (TREE_CODE (x) == MEM_REF
115
      && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
116
    x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
117
  if (TREE_CODE (x) != VAR_DECL
118
      && TREE_CODE (x) != PARM_DECL
119
      && TREE_CODE (x) != RESULT_DECL)
120
    return;
121
  TREE_ADDRESSABLE (x) = 1;
122
}
123
 
124
/* Return a hash value for a formal temporary table entry.  */
125
 
126
static hashval_t
127
gimple_tree_hash (const void *p)
128
{
129
  tree t = ((const elt_t *) p)->val;
130
  return iterative_hash_expr (t, 0);
131
}
132
 
133
/* Compare two formal temporary table entries.  */
134
 
135
static int
136
gimple_tree_eq (const void *p1, const void *p2)
137
{
138
  tree t1 = ((const elt_t *) p1)->val;
139
  tree t2 = ((const elt_t *) p2)->val;
140
  enum tree_code code = TREE_CODE (t1);
141
 
142
  if (TREE_CODE (t2) != code
143
      || TREE_TYPE (t1) != TREE_TYPE (t2))
144
    return 0;
145
 
146
  if (!operand_equal_p (t1, t2, 0))
147
    return 0;
148
 
149
#ifdef ENABLE_CHECKING
150
  /* Only allow them to compare equal if they also hash equal; otherwise
151
     results are nondeterminate, and we fail bootstrap comparison.  */
152
  gcc_assert (gimple_tree_hash (p1) == gimple_tree_hash (p2));
153
#endif
154
 
155
  return 1;
156
}
157
 
158
/* Link gimple statement GS to the end of the sequence *SEQ_P.  If
159
   *SEQ_P is NULL, a new sequence is allocated.  This function is
160
   similar to gimple_seq_add_stmt, but does not scan the operands.
161
   During gimplification, we need to manipulate statement sequences
162
   before the def/use vectors have been constructed.  */
163
 
164
void
165
gimple_seq_add_stmt_without_update (gimple_seq *seq_p, gimple gs)
166
{
167
  gimple_stmt_iterator si;
168
 
169
  if (gs == NULL)
170
    return;
171
 
172
  if (*seq_p == NULL)
173
    *seq_p = gimple_seq_alloc ();
174
 
175
  si = gsi_last (*seq_p);
176
 
177
  gsi_insert_after_without_update (&si, gs, GSI_NEW_STMT);
178
}
179
 
180
/* Shorter alias name for the above function for use in gimplify.c
181
   only.  */
182
 
183
static inline void
184
gimplify_seq_add_stmt (gimple_seq *seq_p, gimple gs)
185
{
186
  gimple_seq_add_stmt_without_update (seq_p, gs);
187
}
188
 
189
/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
190
   NULL, a new sequence is allocated.   This function is
191
   similar to gimple_seq_add_seq, but does not scan the operands.
192
   During gimplification, we need to manipulate statement sequences
193
   before the def/use vectors have been constructed.  */
194
 
195
static void
196
gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
197
{
198
  gimple_stmt_iterator si;
199
 
200
  if (src == NULL)
201
    return;
202
 
203
  if (*dst_p == NULL)
204
    *dst_p = gimple_seq_alloc ();
205
 
206
  si = gsi_last (*dst_p);
207
  gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
208
}
209
 
210
/* Set up a context for the gimplifier.  */
211
 
212
void
213
push_gimplify_context (struct gimplify_ctx *c)
214
{
215
  memset (c, '\0', sizeof (*c));
216
  c->prev_context = gimplify_ctxp;
217
  gimplify_ctxp = c;
218
}
219
 
220
/* Tear down a context for the gimplifier.  If BODY is non-null, then
221
   put the temporaries into the outer BIND_EXPR.  Otherwise, put them
222
   in the local_decls.
223
 
224
   BODY is not a sequence, but the first tuple in a sequence.  */
225
 
226
void
227
pop_gimplify_context (gimple body)
228
{
229
  struct gimplify_ctx *c = gimplify_ctxp;
230
 
231
  gcc_assert (c && (c->bind_expr_stack == NULL
232
                    || VEC_empty (gimple, c->bind_expr_stack)));
233
  VEC_free (gimple, heap, c->bind_expr_stack);
234
  gimplify_ctxp = c->prev_context;
235
 
236
  if (body)
237
    declare_vars (c->temps, body, false);
238
  else
239
    record_vars (c->temps);
240
 
241
  if (c->temp_htab)
242
    htab_delete (c->temp_htab);
243
}
244
 
245
/* Push a GIMPLE_BIND tuple onto the stack of bindings.  */
246
 
247
static void
248
gimple_push_bind_expr (gimple gimple_bind)
249
{
250
  if (gimplify_ctxp->bind_expr_stack == NULL)
251
    gimplify_ctxp->bind_expr_stack = VEC_alloc (gimple, heap, 8);
252
  VEC_safe_push (gimple, heap, gimplify_ctxp->bind_expr_stack, gimple_bind);
253
}
254
 
255
/* Pop the first element off the stack of bindings.  */
256
 
257
static void
258
gimple_pop_bind_expr (void)
259
{
260
  VEC_pop (gimple, gimplify_ctxp->bind_expr_stack);
261
}
262
 
263
/* Return the first element of the stack of bindings.  */
264
 
265
gimple
266
gimple_current_bind_expr (void)
267
{
268
  return VEC_last (gimple, gimplify_ctxp->bind_expr_stack);
269
}
270
 
271
/* Return the stack of bindings created during gimplification.  */
272
 
273
VEC(gimple, heap) *
274
gimple_bind_expr_stack (void)
275
{
276
  return gimplify_ctxp->bind_expr_stack;
277
}
278
 
279
/* Return true iff there is a COND_EXPR between us and the innermost
280
   CLEANUP_POINT_EXPR.  This info is used by gimple_push_cleanup.  */
281
 
282
static bool
283
gimple_conditional_context (void)
284
{
285
  return gimplify_ctxp->conditions > 0;
286
}
287
 
288
/* Note that we've entered a COND_EXPR.  */
289
 
290
static void
291
gimple_push_condition (void)
292
{
293
#ifdef ENABLE_GIMPLE_CHECKING
294
  if (gimplify_ctxp->conditions == 0)
295
    gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
296
#endif
297
  ++(gimplify_ctxp->conditions);
298
}
299
 
300
/* Note that we've left a COND_EXPR.  If we're back at unconditional scope
301
   now, add any conditional cleanups we've seen to the prequeue.  */
302
 
303
static void
304
gimple_pop_condition (gimple_seq *pre_p)
305
{
306
  int conds = --(gimplify_ctxp->conditions);
307
 
308
  gcc_assert (conds >= 0);
309
  if (conds == 0)
310
    {
311
      gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
312
      gimplify_ctxp->conditional_cleanups = NULL;
313
    }
314
}
315
 
316
/* A stable comparison routine for use with splay trees and DECLs.  */
317
 
318
static int
319
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
320
{
321
  tree a = (tree) xa;
322
  tree b = (tree) xb;
323
 
324
  return DECL_UID (a) - DECL_UID (b);
325
}
326
 
327
/* Create a new omp construct that deals with variable remapping.  */
328
 
329
static struct gimplify_omp_ctx *
330
new_omp_context (enum omp_region_type region_type)
331
{
332
  struct gimplify_omp_ctx *c;
333
 
334
  c = XCNEW (struct gimplify_omp_ctx);
335
  c->outer_context = gimplify_omp_ctxp;
336
  c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
337
  c->privatized_types = pointer_set_create ();
338
  c->location = input_location;
339
  c->region_type = region_type;
340
  if ((region_type & ORT_TASK) == 0)
341
    c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
342
  else
343
    c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
344
 
345
  return c;
346
}
347
 
348
/* Destroy an omp construct that deals with variable remapping.  */
349
 
350
static void
351
delete_omp_context (struct gimplify_omp_ctx *c)
352
{
353
  splay_tree_delete (c->variables);
354
  pointer_set_destroy (c->privatized_types);
355
  XDELETE (c);
356
}
357
 
358
static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
359
static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
360
 
361
/* Both gimplify the statement T and append it to *SEQ_P.  This function
362
   behaves exactly as gimplify_stmt, but you don't have to pass T as a
363
   reference.  */
364
 
365
void
366
gimplify_and_add (tree t, gimple_seq *seq_p)
367
{
368
  gimplify_stmt (&t, seq_p);
369
}
370
 
371
/* Gimplify statement T into sequence *SEQ_P, and return the first
372
   tuple in the sequence of generated tuples for this statement.
373
   Return NULL if gimplifying T produced no tuples.  */
374
 
375
static gimple
376
gimplify_and_return_first (tree t, gimple_seq *seq_p)
377
{
378
  gimple_stmt_iterator last = gsi_last (*seq_p);
379
 
380
  gimplify_and_add (t, seq_p);
381
 
382
  if (!gsi_end_p (last))
383
    {
384
      gsi_next (&last);
385
      return gsi_stmt (last);
386
    }
387
  else
388
    return gimple_seq_first_stmt (*seq_p);
389
}
390
 
391
/* Strip off a legitimate source ending from the input string NAME of
392
   length LEN.  Rather than having to know the names used by all of
393
   our front ends, we strip off an ending of a period followed by
394
   up to five characters.  (Java uses ".class".)  */
395
 
396
static inline void
397
remove_suffix (char *name, int len)
398
{
399
  int i;
400
 
401
  for (i = 2;  i < 8 && len > i;  i++)
402
    {
403
      if (name[len - i] == '.')
404
        {
405
          name[len - i] = '\0';
406
          break;
407
        }
408
    }
409
}
410
 
411
/* Create a new temporary name with PREFIX.  Return an identifier.  */
412
 
413
static GTY(()) unsigned int tmp_var_id_num;
414
 
415
tree
416
create_tmp_var_name (const char *prefix)
417
{
418
  char *tmp_name;
419
 
420
  if (prefix)
421
    {
422
      char *preftmp = ASTRDUP (prefix);
423
 
424
      remove_suffix (preftmp, strlen (preftmp));
425
      clean_symbol_name (preftmp);
426
 
427
      prefix = preftmp;
428
    }
429
 
430
  ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix ? prefix : "T", tmp_var_id_num++);
431
  return get_identifier (tmp_name);
432
}
433
 
434
/* Create a new temporary variable declaration of type TYPE.
435
   Do NOT push it into the current binding.  */
436
 
437
tree
438
create_tmp_var_raw (tree type, const char *prefix)
439
{
440
  tree tmp_var;
441
 
442
  tmp_var = build_decl (input_location,
443
                        VAR_DECL, prefix ? create_tmp_var_name (prefix) : NULL,
444
                        type);
445
 
446
  /* The variable was declared by the compiler.  */
447
  DECL_ARTIFICIAL (tmp_var) = 1;
448
  /* And we don't want debug info for it.  */
449
  DECL_IGNORED_P (tmp_var) = 1;
450
 
451
  /* Make the variable writable.  */
452
  TREE_READONLY (tmp_var) = 0;
453
 
454
  DECL_EXTERNAL (tmp_var) = 0;
455
  TREE_STATIC (tmp_var) = 0;
456
  TREE_USED (tmp_var) = 1;
457
 
458
  return tmp_var;
459
}
460
 
461
/* Create a new temporary variable declaration of type TYPE.  DO push the
462
   variable into the current binding.  Further, assume that this is called
463
   only from gimplification or optimization, at which point the creation of
464
   certain types are bugs.  */
465
 
466
tree
467
create_tmp_var (tree type, const char *prefix)
468
{
469
  tree tmp_var;
470
 
471
  /* We don't allow types that are addressable (meaning we can't make copies),
472
     or incomplete.  We also used to reject every variable size objects here,
473
     but now support those for which a constant upper bound can be obtained.
474
     The processing for variable sizes is performed in gimple_add_tmp_var,
475
     point at which it really matters and possibly reached via paths not going
476
     through this function, e.g. after direct calls to create_tmp_var_raw.  */
477
  gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));
478
 
479
  tmp_var = create_tmp_var_raw (type, prefix);
480
  gimple_add_tmp_var (tmp_var);
481
  return tmp_var;
482
}
483
 
484
/* Create a new temporary variable declaration of type TYPE by calling
485
   create_tmp_var and if TYPE is a vector or a complex number, mark the new
486
   temporary as gimple register.  */
487
 
488
tree
489
create_tmp_reg (tree type, const char *prefix)
490
{
491
  tree tmp;
492
 
493
  tmp = create_tmp_var (type, prefix);
494
  if (TREE_CODE (type) == COMPLEX_TYPE
495
      || TREE_CODE (type) == VECTOR_TYPE)
496
    DECL_GIMPLE_REG_P (tmp) = 1;
497
 
498
  return tmp;
499
}
500
 
501
/* Create a temporary with a name derived from VAL.  Subroutine of
502
   lookup_tmp_var; nobody else should call this function.  */
503
 
504
static inline tree
505
create_tmp_from_val (tree val)
506
{
507
  /* Drop all qualifiers and address-space information from the value type.  */
508
  return create_tmp_var (TYPE_MAIN_VARIANT (TREE_TYPE (val)), get_name (val));
509
}
510
 
511
/* Create a temporary to hold the value of VAL.  If IS_FORMAL, try to reuse
512
   an existing expression temporary.  */
513
 
514
static tree
515
lookup_tmp_var (tree val, bool is_formal)
516
{
517
  tree ret;
518
 
519
  /* If not optimizing, never really reuse a temporary.  local-alloc
520
     won't allocate any variable that is used in more than one basic
521
     block, which means it will go into memory, causing much extra
522
     work in reload and final and poorer code generation, outweighing
523
     the extra memory allocation here.  */
524
  if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
525
    ret = create_tmp_from_val (val);
526
  else
527
    {
528
      elt_t elt, *elt_p;
529
      void **slot;
530
 
531
      elt.val = val;
532
      if (gimplify_ctxp->temp_htab == NULL)
533
        gimplify_ctxp->temp_htab
534
          = htab_create (1000, gimple_tree_hash, gimple_tree_eq, free);
535
      slot = htab_find_slot (gimplify_ctxp->temp_htab, (void *)&elt, INSERT);
536
      if (*slot == NULL)
537
        {
538
          elt_p = XNEW (elt_t);
539
          elt_p->val = val;
540
          elt_p->temp = ret = create_tmp_from_val (val);
541
          *slot = (void *) elt_p;
542
        }
543
      else
544
        {
545
          elt_p = (elt_t *) *slot;
546
          ret = elt_p->temp;
547
        }
548
    }
549
 
550
  return ret;
551
}
552
 
553
/* Return true if T is a CALL_EXPR or an expression that can be
554
   assigned to a temporary.  Note that this predicate should only be
555
   used during gimplification.  See the rationale for this in
556
   gimplify_modify_expr.  */
557
 
558
static bool
559
is_gimple_reg_rhs_or_call (tree t)
560
{
561
  return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
562
          || TREE_CODE (t) == CALL_EXPR);
563
}
564
 
565
/* Return true if T is a valid memory RHS or a CALL_EXPR.  Note that
566
   this predicate should only be used during gimplification.  See the
567
   rationale for this in gimplify_modify_expr.  */
568
 
569
static bool
570
is_gimple_mem_rhs_or_call (tree t)
571
{
572
  /* If we're dealing with a renamable type, either source or dest must be
573
     a renamed variable.  */
574
  if (is_gimple_reg_type (TREE_TYPE (t)))
575
    return is_gimple_val (t);
576
  else
577
    return (is_gimple_val (t) || is_gimple_lvalue (t)
578
            || TREE_CODE (t) == CALL_EXPR);
579
}
580
 
581
/* Helper for get_formal_tmp_var and get_initialized_tmp_var.  */
582
 
583
static tree
584
internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
585
                      bool is_formal)
586
{
587
  tree t, mod;
588
 
589
  /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
590
     can create an INIT_EXPR and convert it into a GIMPLE_CALL below.  */
591
  gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
592
                 fb_rvalue);
593
 
594
  t = lookup_tmp_var (val, is_formal);
595
 
596
  if (is_formal
597
      && (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
598
          || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE))
599
    DECL_GIMPLE_REG_P (t) = 1;
600
 
601
  mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));
602
 
603
  SET_EXPR_LOCATION (mod, EXPR_LOC_OR_HERE (val));
604
 
605
  /* gimplify_modify_expr might want to reduce this further.  */
606
  gimplify_and_add (mod, pre_p);
607
  ggc_free (mod);
608
 
609
  /* If we're gimplifying into ssa, gimplify_modify_expr will have
610
     given our temporary an SSA name.  Find and return it.  */
611
  if (gimplify_ctxp->into_ssa)
612
    {
613
      gimple last = gimple_seq_last_stmt (*pre_p);
614
      t = gimple_get_lhs (last);
615
    }
616
 
617
  return t;
618
}
619
 
620
/* Return a formal temporary variable initialized with VAL.  PRE_P is as
621
   in gimplify_expr.  Only use this function if:
622
 
623
   1) The value of the unfactored expression represented by VAL will not
624
      change between the initialization and use of the temporary, and
625
   2) The temporary will not be otherwise modified.
626
 
627
   For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
628
   and #2 means it is inappropriate for && temps.
629
 
630
   For other cases, use get_initialized_tmp_var instead.  */
631
 
632
tree
633
get_formal_tmp_var (tree val, gimple_seq *pre_p)
634
{
635
  return internal_get_tmp_var (val, pre_p, NULL, true);
636
}
637
 
638
/* Return a temporary variable initialized with VAL.  PRE_P and POST_P
639
   are as in gimplify_expr.  */
640
 
641
tree
642
get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p)
643
{
644
  return internal_get_tmp_var (val, pre_p, post_p, false);
645
}
646
 
647
/* Declare all the variables in VARS in SCOPE.  If DEBUG_INFO is true,
648
   generate debug info for them; otherwise don't.  */
649
 
650
void
651
declare_vars (tree vars, gimple scope, bool debug_info)
652
{
653
  tree last = vars;
654
  if (last)
655
    {
656
      tree temps, block;
657
 
658
      gcc_assert (gimple_code (scope) == GIMPLE_BIND);
659
 
660
      temps = nreverse (last);
661
 
662
      block = gimple_bind_block (scope);
663
      gcc_assert (!block || TREE_CODE (block) == BLOCK);
664
      if (!block || !debug_info)
665
        {
666
          DECL_CHAIN (last) = gimple_bind_vars (scope);
667
          gimple_bind_set_vars (scope, temps);
668
        }
669
      else
670
        {
671
          /* We need to attach the nodes both to the BIND_EXPR and to its
672
             associated BLOCK for debugging purposes.  The key point here
673
             is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
674
             is a subchain of the BIND_EXPR_VARS of the BIND_EXPR.  */
675
          if (BLOCK_VARS (block))
676
            BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
677
          else
678
            {
679
              gimple_bind_set_vars (scope,
680
                                    chainon (gimple_bind_vars (scope), temps));
681
              BLOCK_VARS (block) = temps;
682
            }
683
        }
684
    }
685
}
686
 
687
/* For VAR a VAR_DECL of variable size, try to find a constant upper bound
688
   for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly.  Abort if
689
   no such upper bound can be obtained.  */
690
 
691
static void
692
force_constant_size (tree var)
693
{
694
  /* The only attempt we make is by querying the maximum size of objects
695
     of the variable's type.  */
696
 
697
  HOST_WIDE_INT max_size;
698
 
699
  gcc_assert (TREE_CODE (var) == VAR_DECL);
700
 
701
  max_size = max_int_size_in_bytes (TREE_TYPE (var));
702
 
703
  gcc_assert (max_size >= 0);
704
 
705
  DECL_SIZE_UNIT (var)
706
    = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
707
  DECL_SIZE (var)
708
    = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
709
}
710
 
711
/* Push the temporary variable TMP into the current binding.  */
712
 
713
void
714
gimple_add_tmp_var (tree tmp)
715
{
716
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
717
 
718
  /* Later processing assumes that the object size is constant, which might
719
     not be true at this point.  Force the use of a constant upper bound in
720
     this case.  */
721
  if (!host_integerp (DECL_SIZE_UNIT (tmp), 1))
722
    force_constant_size (tmp);
723
 
724
  DECL_CONTEXT (tmp) = current_function_decl;
725
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
726
 
727
  if (gimplify_ctxp)
728
    {
729
      DECL_CHAIN (tmp) = gimplify_ctxp->temps;
730
      gimplify_ctxp->temps = tmp;
731
 
732
      /* Mark temporaries local within the nearest enclosing parallel.  */
733
      if (gimplify_omp_ctxp)
734
        {
735
          struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
736
          while (ctx && ctx->region_type == ORT_WORKSHARE)
737
            ctx = ctx->outer_context;
738
          if (ctx)
739
            omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
740
        }
741
    }
742
  else if (cfun)
743
    record_vars (tmp);
744
  else
745
    {
746
      gimple_seq body_seq;
747
 
748
      /* This case is for nested functions.  We need to expose the locals
749
         they create.  */
750
      body_seq = gimple_body (current_function_decl);
751
      declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
752
    }
753
}
754
 
755
/* Determine whether to assign a location to the statement GS.  */
756
 
757
static bool
758
should_carry_location_p (gimple gs)
759
{
760
  /* Don't emit a line note for a label.  We particularly don't want to
761
     emit one for the break label, since it doesn't actually correspond
762
     to the beginning of the loop/switch.  */
763
  if (gimple_code (gs) == GIMPLE_LABEL)
764
    return false;
765
 
766
  return true;
767
}
768
 
769
/* Return true if a location should not be emitted for this statement
770
   by annotate_one_with_location.  */
771
 
772
static inline bool
773
gimple_do_not_emit_location_p (gimple g)
774
{
775
  return gimple_plf (g, GF_PLF_1);
776
}
777
 
778
/* Mark statement G so a location will not be emitted by
779
   annotate_one_with_location.  */
780
 
781
static inline void
782
gimple_set_do_not_emit_location (gimple g)
783
{
784
  /* The PLF flags are initialized to 0 when a new tuple is created,
785
     so no need to initialize it anywhere.  */
786
  gimple_set_plf (g, GF_PLF_1, true);
787
}
788
 
789
/* Set the location for gimple statement GS to LOCATION.  */
790
 
791
static void
792
annotate_one_with_location (gimple gs, location_t location)
793
{
794
  if (!gimple_has_location (gs)
795
      && !gimple_do_not_emit_location_p (gs)
796
      && should_carry_location_p (gs))
797
    gimple_set_location (gs, location);
798
}
799
 
800
/* Set LOCATION for all the statements after iterator GSI in sequence
801
   SEQ.  If GSI is pointing to the end of the sequence, start with the
802
   first statement in SEQ.  */
803
 
804
static void
805
annotate_all_with_location_after (gimple_seq seq, gimple_stmt_iterator gsi,
806
                                  location_t location)
807
{
808
  if (gsi_end_p (gsi))
809
    gsi = gsi_start (seq);
810
  else
811
    gsi_next (&gsi);
812
 
813
  for (; !gsi_end_p (gsi); gsi_next (&gsi))
814
    annotate_one_with_location (gsi_stmt (gsi), location);
815
}
816
 
817
/* Set the location for all the statements in a sequence STMT_P to LOCATION.  */
818
 
819
void
820
annotate_all_with_location (gimple_seq stmt_p, location_t location)
821
{
822
  gimple_stmt_iterator i;
823
 
824
  if (gimple_seq_empty_p (stmt_p))
825
    return;
826
 
827
  for (i = gsi_start (stmt_p); !gsi_end_p (i); gsi_next (&i))
828
    {
829
      gimple gs = gsi_stmt (i);
830
      annotate_one_with_location (gs, location);
831
    }
832
}
833
 
834
/* This page contains routines to unshare tree nodes, i.e. to duplicate tree
835
   nodes that are referenced more than once in GENERIC functions.  This is
836
   necessary because gimplification (translation into GIMPLE) is performed
837
   by modifying tree nodes in-place, so gimplication of a shared node in a
838
   first context could generate an invalid GIMPLE form in a second context.
839
 
840
   This is achieved with a simple mark/copy/unmark algorithm that walks the
841
   GENERIC representation top-down, marks nodes with TREE_VISITED the first
842
   time it encounters them, duplicates them if they already have TREE_VISITED
843
   set, and finally removes the TREE_VISITED marks it has set.
844
 
845
   The algorithm works only at the function level, i.e. it generates a GENERIC
846
   representation of a function with no nodes shared within the function when
847
   passed a GENERIC function (except for nodes that are allowed to be shared).
848
 
849
   At the global level, it is also necessary to unshare tree nodes that are
850
   referenced in more than one function, for the same aforementioned reason.
851
   This requires some cooperation from the front-end.  There are 2 strategies:
852
 
853
     1. Manual unsharing.  The front-end needs to call unshare_expr on every
854
        expression that might end up being shared across functions.
855
 
856
     2. Deep unsharing.  This is an extension of regular unsharing.  Instead
857
        of calling unshare_expr on expressions that might be shared across
858
        functions, the front-end pre-marks them with TREE_VISITED.  This will
859
        ensure that they are unshared on the first reference within functions
860
        when the regular unsharing algorithm runs.  The counterpart is that
861
        this algorithm must look deeper than for manual unsharing, which is
862
        specified by LANG_HOOKS_DEEP_UNSHARING.
863
 
864
  If there are only few specific cases of node sharing across functions, it is
865
  probably easier for a front-end to unshare the expressions manually.  On the
866
  contrary, if the expressions generated at the global level are as widespread
867
  as expressions generated within functions, deep unsharing is very likely the
868
  way to go.  */
869
 
870
/* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
871
   These nodes model computations that must be done once.  If we were to
872
   unshare something like SAVE_EXPR(i++), the gimplification process would
873
   create wrong code.  However, if DATA is non-null, it must hold a pointer
874
   set that is used to unshare the subtrees of these nodes.  */
875
 
876
static tree
877
mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
878
{
879
  tree t = *tp;
880
  enum tree_code code = TREE_CODE (t);
881
 
882
  /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
883
     copy their subtrees if we can make sure to do it only once.  */
884
  if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
885
    {
886
      if (data && !pointer_set_insert ((struct pointer_set_t *)data, t))
887
        ;
888
      else
889
        *walk_subtrees = 0;
890
    }
891
 
892
  /* Stop at types, decls, constants like copy_tree_r.  */
893
  else if (TREE_CODE_CLASS (code) == tcc_type
894
           || TREE_CODE_CLASS (code) == tcc_declaration
895
           || TREE_CODE_CLASS (code) == tcc_constant
896
           /* We can't do anything sensible with a BLOCK used as an
897
              expression, but we also can't just die when we see it
898
              because of non-expression uses.  So we avert our eyes
899
              and cross our fingers.  Silly Java.  */
900
           || code == BLOCK)
901
    *walk_subtrees = 0;
902
 
903
  /* Cope with the statement expression extension.  */
904
  else if (code == STATEMENT_LIST)
905
    ;
906
 
907
  /* Leave the bulk of the work to copy_tree_r itself.  */
908
  else
909
    copy_tree_r (tp, walk_subtrees, NULL);
910
 
911
  return NULL_TREE;
912
}
913
 
914
/* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
915
   If *TP has been visited already, then *TP is deeply copied by calling
916
   mostly_copy_tree_r.  DATA is passed to mostly_copy_tree_r unmodified.  */
917
 
918
static tree
919
copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
920
{
921
  tree t = *tp;
922
  enum tree_code code = TREE_CODE (t);
923
 
924
  /* Skip types, decls, and constants.  But we do want to look at their
925
     types and the bounds of types.  Mark them as visited so we properly
926
     unmark their subtrees on the unmark pass.  If we've already seen them,
927
     don't look down further.  */
928
  if (TREE_CODE_CLASS (code) == tcc_type
929
      || TREE_CODE_CLASS (code) == tcc_declaration
930
      || TREE_CODE_CLASS (code) == tcc_constant)
931
    {
932
      if (TREE_VISITED (t))
933
        *walk_subtrees = 0;
934
      else
935
        TREE_VISITED (t) = 1;
936
    }
937
 
938
  /* If this node has been visited already, unshare it and don't look
939
     any deeper.  */
940
  else if (TREE_VISITED (t))
941
    {
942
      walk_tree (tp, mostly_copy_tree_r, data, NULL);
943
      *walk_subtrees = 0;
944
    }
945
 
946
  /* Otherwise, mark the node as visited and keep looking.  */
947
  else
948
    TREE_VISITED (t) = 1;
949
 
950
  return NULL_TREE;
951
}
952
 
953
/* Unshare most of the shared trees rooted at *TP.  DATA is passed to the
954
   copy_if_shared_r callback unmodified.  */
955
 
956
static inline void
957
copy_if_shared (tree *tp, void *data)
958
{
959
  walk_tree (tp, copy_if_shared_r, data, NULL);
960
}
961
 
962
/* Unshare all the trees in the body of FNDECL, as well as in the bodies of
963
   any nested functions.  */
964
 
965
static void
966
unshare_body (tree fndecl)
967
{
968
  struct cgraph_node *cgn = cgraph_get_node (fndecl);
969
  /* If the language requires deep unsharing, we need a pointer set to make
970
     sure we don't repeatedly unshare subtrees of unshareable nodes.  */
971
  struct pointer_set_t *visited
972
    = lang_hooks.deep_unsharing ? pointer_set_create () : NULL;
973
 
974
  copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
975
  copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
976
  copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);
977
 
978
  if (visited)
979
    pointer_set_destroy (visited);
980
 
981
  if (cgn)
982
    for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
983
      unshare_body (cgn->decl);
984
}
985
 
986
/* Callback for walk_tree to unmark the visited trees rooted at *TP.
987
   Subtrees are walked until the first unvisited node is encountered.  */
988
 
989
static tree
990
unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
991
{
992
  tree t = *tp;
993
 
994
  /* If this node has been visited, unmark it and keep looking.  */
995
  if (TREE_VISITED (t))
996
    TREE_VISITED (t) = 0;
997
 
998
  /* Otherwise, don't look any deeper.  */
999
  else
1000
    *walk_subtrees = 0;
1001
 
1002
  return NULL_TREE;
1003
}
1004
 
1005
/* Unmark the visited trees rooted at *TP.  */
1006
 
1007
static inline void
1008
unmark_visited (tree *tp)
1009
{
1010
  walk_tree (tp, unmark_visited_r, NULL, NULL);
1011
}
1012
 
1013
/* Likewise, but mark all trees as not visited.  */
1014
 
1015
static void
1016
unvisit_body (tree fndecl)
1017
{
1018
  struct cgraph_node *cgn = cgraph_get_node (fndecl);
1019
 
1020
  unmark_visited (&DECL_SAVED_TREE (fndecl));
1021
  unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
1022
  unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));
1023
 
1024
  if (cgn)
1025
    for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
1026
      unvisit_body (cgn->decl);
1027
}
1028
 
1029
/* Unconditionally make an unshared copy of EXPR.  This is used when using
1030
   stored expressions which span multiple functions, such as BINFO_VTABLE,
1031
   as the normal unsharing process can't tell that they're shared.  */
1032
 
1033
tree
1034
unshare_expr (tree expr)
1035
{
1036
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
1037
  return expr;
1038
}
1039
 
1040
/* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
1041
   contain statements and have a value.  Assign its value to a temporary
1042
   and give it void_type_node.  Return the temporary, or NULL_TREE if
1043
   WRAPPER was already void.  */
1044
 
1045
tree
1046
voidify_wrapper_expr (tree wrapper, tree temp)
1047
{
1048
  tree type = TREE_TYPE (wrapper);
1049
  if (type && !VOID_TYPE_P (type))
1050
    {
1051
      tree *p;
1052
 
1053
      /* Set p to point to the body of the wrapper.  Loop until we find
1054
         something that isn't a wrapper.  */
1055
      for (p = &wrapper; p && *p; )
1056
        {
1057
          switch (TREE_CODE (*p))
1058
            {
1059
            case BIND_EXPR:
1060
              TREE_SIDE_EFFECTS (*p) = 1;
1061
              TREE_TYPE (*p) = void_type_node;
1062
              /* For a BIND_EXPR, the body is operand 1.  */
1063
              p = &BIND_EXPR_BODY (*p);
1064
              break;
1065
 
1066
            case CLEANUP_POINT_EXPR:
1067
            case TRY_FINALLY_EXPR:
1068
            case TRY_CATCH_EXPR:
1069
              TREE_SIDE_EFFECTS (*p) = 1;
1070
              TREE_TYPE (*p) = void_type_node;
1071
              p = &TREE_OPERAND (*p, 0);
1072
              break;
1073
 
1074
            case STATEMENT_LIST:
1075
              {
1076
                tree_stmt_iterator i = tsi_last (*p);
1077
                TREE_SIDE_EFFECTS (*p) = 1;
1078
                TREE_TYPE (*p) = void_type_node;
1079
                p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
1080
              }
1081
              break;
1082
 
1083
            case COMPOUND_EXPR:
1084
              /* Advance to the last statement.  Set all container types to
1085
                 void.  */
1086
              for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
1087
                {
1088
                  TREE_SIDE_EFFECTS (*p) = 1;
1089
                  TREE_TYPE (*p) = void_type_node;
1090
                }
1091
              break;
1092
 
1093
            case TRANSACTION_EXPR:
1094
              TREE_SIDE_EFFECTS (*p) = 1;
1095
              TREE_TYPE (*p) = void_type_node;
1096
              p = &TRANSACTION_EXPR_BODY (*p);
1097
              break;
1098
 
1099
            default:
1100
              /* Assume that any tree upon which voidify_wrapper_expr is
1101
                 directly called is a wrapper, and that its body is op0.  */
1102
              if (p == &wrapper)
1103
                {
1104
                  TREE_SIDE_EFFECTS (*p) = 1;
1105
                  TREE_TYPE (*p) = void_type_node;
1106
                  p = &TREE_OPERAND (*p, 0);
1107
                  break;
1108
                }
1109
              goto out;
1110
            }
1111
        }
1112
 
1113
    out:
1114
      if (p == NULL || IS_EMPTY_STMT (*p))
1115
        temp = NULL_TREE;
1116
      else if (temp)
1117
        {
1118
          /* The wrapper is on the RHS of an assignment that we're pushing
1119
             down.  */
1120
          gcc_assert (TREE_CODE (temp) == INIT_EXPR
1121
                      || TREE_CODE (temp) == MODIFY_EXPR);
1122
          TREE_OPERAND (temp, 1) = *p;
1123
          *p = temp;
1124
        }
1125
      else
1126
        {
1127
          temp = create_tmp_var (type, "retval");
1128
          *p = build2 (INIT_EXPR, type, temp, *p);
1129
        }
1130
 
1131
      return temp;
1132
    }
1133
 
1134
  return NULL_TREE;
1135
}
1136
 
1137
/* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1138
   a temporary through which they communicate.  */
1139
 
1140
static void
1141
build_stack_save_restore (gimple *save, gimple *restore)
1142
{
1143
  tree tmp_var;
1144
 
1145
  *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
1146
  tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1147
  gimple_call_set_lhs (*save, tmp_var);
1148
 
1149
  *restore
1150
    = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
1151
                         1, tmp_var);
1152
}
1153
 
1154
/* Gimplify a BIND_EXPR.  Just voidify and recurse.  */
1155
 
1156
static enum gimplify_status
1157
gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
1158
{
1159
  tree bind_expr = *expr_p;
1160
  bool old_save_stack = gimplify_ctxp->save_stack;
1161
  tree t;
1162
  gimple gimple_bind;
1163
  gimple_seq body, cleanup;
1164
  gimple stack_save;
1165
 
1166
  tree temp = voidify_wrapper_expr (bind_expr, NULL);
1167
 
1168
  /* Mark variables seen in this bind expr.  */
1169
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1170
    {
1171
      if (TREE_CODE (t) == VAR_DECL)
1172
        {
1173
          struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1174
 
1175
          /* Mark variable as local.  */
1176
          if (ctx && !DECL_EXTERNAL (t)
1177
              && (! DECL_SEEN_IN_BIND_EXPR_P (t)
1178
                  || splay_tree_lookup (ctx->variables,
1179
                                        (splay_tree_key) t) == NULL))
1180
            omp_add_variable (gimplify_omp_ctxp, t, GOVD_LOCAL | GOVD_SEEN);
1181
 
1182
          DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
1183
 
1184
          if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
1185
            cfun->has_local_explicit_reg_vars = true;
1186
        }
1187
 
1188
      /* Preliminarily mark non-addressed complex variables as eligible
1189
         for promotion to gimple registers.  We'll transform their uses
1190
         as we find them.  */
1191
      if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
1192
           || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
1193
          && !TREE_THIS_VOLATILE (t)
1194
          && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
1195
          && !needs_to_live_in_memory (t))
1196
        DECL_GIMPLE_REG_P (t) = 1;
1197
    }
1198
 
1199
  gimple_bind = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
1200
                                   BIND_EXPR_BLOCK (bind_expr));
1201
  gimple_push_bind_expr (gimple_bind);
1202
 
1203
  gimplify_ctxp->save_stack = false;
1204
 
1205
  /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
1206
  body = NULL;
1207
  gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
1208
  gimple_bind_set_body (gimple_bind, body);
1209
 
1210
  cleanup = NULL;
1211
  stack_save = NULL;
1212
  if (gimplify_ctxp->save_stack)
1213
    {
1214
      gimple stack_restore;
1215
 
1216
      /* Save stack on entry and restore it on exit.  Add a try_finally
1217
         block to achieve this.  Note that mudflap depends on the
1218
         format of the emitted code: see mx_register_decls().  */
1219
      build_stack_save_restore (&stack_save, &stack_restore);
1220
 
1221
      gimplify_seq_add_stmt (&cleanup, stack_restore);
1222
    }
1223
 
1224
  /* Add clobbers for all variables that go out of scope.  */
1225
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1226
    {
1227
      if (TREE_CODE (t) == VAR_DECL
1228
          && !is_global_var (t)
1229
          && DECL_CONTEXT (t) == current_function_decl
1230
          && !DECL_HARD_REGISTER (t)
1231
          && !TREE_THIS_VOLATILE (t)
1232
          && !DECL_HAS_VALUE_EXPR_P (t)
1233
          /* Only care for variables that have to be in memory.  Others
1234
             will be rewritten into SSA names, hence moved to the top-level.  */
1235
          && !is_gimple_reg (t))
1236
        {
1237
          tree clobber = build_constructor (TREE_TYPE (t), NULL);
1238
          TREE_THIS_VOLATILE (clobber) = 1;
1239
          gimplify_seq_add_stmt (&cleanup, gimple_build_assign (t, clobber));
1240
        }
1241
    }
1242
 
1243
  if (cleanup)
1244
    {
1245
      gimple gs;
1246
      gimple_seq new_body;
1247
 
1248
      new_body = NULL;
1249
      gs = gimple_build_try (gimple_bind_body (gimple_bind), cleanup,
1250
                             GIMPLE_TRY_FINALLY);
1251
 
1252
      if (stack_save)
1253
        gimplify_seq_add_stmt (&new_body, stack_save);
1254
      gimplify_seq_add_stmt (&new_body, gs);
1255
      gimple_bind_set_body (gimple_bind, new_body);
1256
    }
1257
 
1258
  gimplify_ctxp->save_stack = old_save_stack;
1259
  gimple_pop_bind_expr ();
1260
 
1261
  gimplify_seq_add_stmt (pre_p, gimple_bind);
1262
 
1263
  if (temp)
1264
    {
1265
      *expr_p = temp;
1266
      return GS_OK;
1267
    }
1268
 
1269
  *expr_p = NULL_TREE;
1270
  return GS_ALL_DONE;
1271
}
1272
 
1273
/* Gimplify a RETURN_EXPR.  If the expression to be returned is not a
1274
   GIMPLE value, it is assigned to a new temporary and the statement is
1275
   re-written to return the temporary.
1276
 
1277
   PRE_P points to the sequence where side effects that must happen before
1278
   STMT should be stored.  */
1279
 
1280
static enum gimplify_status
1281
gimplify_return_expr (tree stmt, gimple_seq *pre_p)
1282
{
1283
  gimple ret;
1284
  tree ret_expr = TREE_OPERAND (stmt, 0);
1285
  tree result_decl, result;
1286
 
1287
  if (ret_expr == error_mark_node)
1288
    return GS_ERROR;
1289
 
1290
  if (!ret_expr
1291
      || TREE_CODE (ret_expr) == RESULT_DECL
1292
      || ret_expr == error_mark_node)
1293
    {
1294
      gimple ret = gimple_build_return (ret_expr);
1295
      gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1296
      gimplify_seq_add_stmt (pre_p, ret);
1297
      return GS_ALL_DONE;
1298
    }
1299
 
1300
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1301
    result_decl = NULL_TREE;
1302
  else
1303
    {
1304
      result_decl = TREE_OPERAND (ret_expr, 0);
1305
 
1306
      /* See through a return by reference.  */
1307
      if (TREE_CODE (result_decl) == INDIRECT_REF)
1308
        result_decl = TREE_OPERAND (result_decl, 0);
1309
 
1310
      gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1311
                   || TREE_CODE (ret_expr) == INIT_EXPR)
1312
                  && TREE_CODE (result_decl) == RESULT_DECL);
1313
    }
1314
 
1315
  /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1316
     Recall that aggregate_value_p is FALSE for any aggregate type that is
1317
     returned in registers.  If we're returning values in registers, then
1318
     we don't want to extend the lifetime of the RESULT_DECL, particularly
1319
     across another call.  In addition, for those aggregates for which
1320
     hard_function_value generates a PARALLEL, we'll die during normal
1321
     expansion of structure assignments; there's special code in expand_return
1322
     to handle this case that does not exist in expand_expr.  */
1323
  if (!result_decl)
1324
    result = NULL_TREE;
1325
  else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1326
    {
1327
      if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
1328
        {
1329
          if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
1330
            gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
1331
          /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1332
             should be effectively allocated by the caller, i.e. all calls to
1333
             this function must be subject to the Return Slot Optimization.  */
1334
          gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
1335
          gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
1336
        }
1337
      result = result_decl;
1338
    }
1339
  else if (gimplify_ctxp->return_temp)
1340
    result = gimplify_ctxp->return_temp;
1341
  else
1342
    {
1343
      result = create_tmp_reg (TREE_TYPE (result_decl), NULL);
1344
 
1345
      /* ??? With complex control flow (usually involving abnormal edges),
1346
         we can wind up warning about an uninitialized value for this.  Due
1347
         to how this variable is constructed and initialized, this is never
1348
         true.  Give up and never warn.  */
1349
      TREE_NO_WARNING (result) = 1;
1350
 
1351
      gimplify_ctxp->return_temp = result;
1352
    }
1353
 
1354
  /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1355
     Then gimplify the whole thing.  */
1356
  if (result != result_decl)
1357
    TREE_OPERAND (ret_expr, 0) = result;
1358
 
1359
  gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1360
 
1361
  ret = gimple_build_return (result);
1362
  gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1363
  gimplify_seq_add_stmt (pre_p, ret);
1364
 
1365
  return GS_ALL_DONE;
1366
}
1367
 
1368
/* Gimplify a variable-length array DECL.  */
1369
 
1370
static void
1371
gimplify_vla_decl (tree decl, gimple_seq *seq_p)
1372
{
1373
  /* This is a variable-sized decl.  Simplify its size and mark it
1374
     for deferred expansion.  Note that mudflap depends on the format
1375
     of the emitted code: see mx_register_decls().  */
1376
  tree t, addr, ptr_type;
1377
 
1378
  gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
1379
  gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);
1380
 
1381
  /* All occurrences of this decl in final gimplified code will be
1382
     replaced by indirection.  Setting DECL_VALUE_EXPR does two
1383
     things: First, it lets the rest of the gimplifier know what
1384
     replacement to use.  Second, it lets the debug info know
1385
     where to find the value.  */
1386
  ptr_type = build_pointer_type (TREE_TYPE (decl));
1387
  addr = create_tmp_var (ptr_type, get_name (decl));
1388
  DECL_IGNORED_P (addr) = 0;
1389
  t = build_fold_indirect_ref (addr);
1390
  TREE_THIS_NOTRAP (t) = 1;
1391
  SET_DECL_VALUE_EXPR (decl, t);
1392
  DECL_HAS_VALUE_EXPR_P (decl) = 1;
1393
 
1394
  t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
1395
  t = build_call_expr (t, 2, DECL_SIZE_UNIT (decl),
1396
                       size_int (DECL_ALIGN (decl)));
1397
  /* The call has been built for a variable-sized object.  */
1398
  CALL_ALLOCA_FOR_VAR_P (t) = 1;
1399
  t = fold_convert (ptr_type, t);
1400
  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
1401
 
1402
  gimplify_and_add (t, seq_p);
1403
 
1404
  /* Indicate that we need to restore the stack level when the
1405
     enclosing BIND_EXPR is exited.  */
1406
  gimplify_ctxp->save_stack = true;
1407
}
1408
 
1409
/* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
1410
   and initialization explicit.  */
1411
 
1412
static enum gimplify_status
1413
gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
1414
{
1415
  tree stmt = *stmt_p;
1416
  tree decl = DECL_EXPR_DECL (stmt);
1417
 
1418
  *stmt_p = NULL_TREE;
1419
 
1420
  if (TREE_TYPE (decl) == error_mark_node)
1421
    return GS_ERROR;
1422
 
1423
  if ((TREE_CODE (decl) == TYPE_DECL
1424
       || TREE_CODE (decl) == VAR_DECL)
1425
      && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
1426
    gimplify_type_sizes (TREE_TYPE (decl), seq_p);
1427
 
1428
  if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
1429
    {
1430
      tree init = DECL_INITIAL (decl);
1431
 
1432
      if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1433
          || (!TREE_STATIC (decl)
1434
              && flag_stack_check == GENERIC_STACK_CHECK
1435
              && compare_tree_int (DECL_SIZE_UNIT (decl),
1436
                                   STACK_CHECK_MAX_VAR_SIZE) > 0))
1437
        gimplify_vla_decl (decl, seq_p);
1438
 
1439
      /* Some front ends do not explicitly declare all anonymous
1440
         artificial variables.  We compensate here by declaring the
1441
         variables, though it would be better if the front ends would
1442
         explicitly declare them.  */
1443
      if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1444
          && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1445
        gimple_add_tmp_var (decl);
1446
 
1447
      if (init && init != error_mark_node)
1448
        {
1449
          if (!TREE_STATIC (decl))
1450
            {
1451
              DECL_INITIAL (decl) = NULL_TREE;
1452
              init = build2 (INIT_EXPR, void_type_node, decl, init);
1453
              gimplify_and_add (init, seq_p);
1454
              ggc_free (init);
1455
            }
1456
          else
1457
            /* We must still examine initializers for static variables
1458
               as they may contain a label address.  */
1459
            walk_tree (&init, force_labels_r, NULL, NULL);
1460
        }
1461
    }
1462
 
1463
  return GS_ALL_DONE;
1464
}
1465
 
1466
/* Gimplify a LOOP_EXPR.  Normally this just involves gimplifying the body
1467
   and replacing the LOOP_EXPR with goto, but if the loop contains an
1468
   EXIT_EXPR, we need to append a label for it to jump to.  */
1469
 
1470
static enum gimplify_status
1471
gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1472
{
1473
  tree saved_label = gimplify_ctxp->exit_label;
1474
  tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1475
 
1476
  gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1477
 
1478
  gimplify_ctxp->exit_label = NULL_TREE;
1479
 
1480
  gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1481
 
1482
  gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1483
 
1484
  if (gimplify_ctxp->exit_label)
1485
    gimplify_seq_add_stmt (pre_p,
1486
                           gimple_build_label (gimplify_ctxp->exit_label));
1487
 
1488
  gimplify_ctxp->exit_label = saved_label;
1489
 
1490
  *expr_p = NULL;
1491
  return GS_ALL_DONE;
1492
}
1493
 
1494
/* Gimplify a statement list onto a sequence.  These may be created either
1495
   by an enlightened front-end, or by shortcut_cond_expr.  */
1496
 
1497
static enum gimplify_status
1498
gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1499
{
1500
  tree temp = voidify_wrapper_expr (*expr_p, NULL);
1501
 
1502
  tree_stmt_iterator i = tsi_start (*expr_p);
1503
 
1504
  while (!tsi_end_p (i))
1505
    {
1506
      gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1507
      tsi_delink (&i);
1508
    }
1509
 
1510
  if (temp)
1511
    {
1512
      *expr_p = temp;
1513
      return GS_OK;
1514
    }
1515
 
1516
  return GS_ALL_DONE;
1517
}
1518
 
1519
/* Compare two case labels.  Because the front end should already have
1520
   made sure that case ranges do not overlap, it is enough to only compare
1521
   the CASE_LOW values of each case label.  */
1522
 
1523
static int
1524
compare_case_labels (const void *p1, const void *p2)
1525
{
1526
  const_tree const case1 = *(const_tree const*)p1;
1527
  const_tree const case2 = *(const_tree const*)p2;
1528
 
1529
  /* The 'default' case label always goes first.  */
1530
  if (!CASE_LOW (case1))
1531
    return -1;
1532
  else if (!CASE_LOW (case2))
1533
    return 1;
1534
  else
1535
    return tree_int_cst_compare (CASE_LOW (case1), CASE_LOW (case2));
1536
}
1537
 
1538
/* Sort the case labels in LABEL_VEC in place in ascending order.  */
1539
 
1540
void
1541
sort_case_labels (VEC(tree,heap)* label_vec)
1542
{
1543
  VEC_qsort (tree, label_vec, compare_case_labels);
1544
}
1545
 
1546
/* Gimplify a SWITCH_EXPR, and collect a TREE_VEC of the labels it can
1547
   branch to.  */
1548
 
1549
static enum gimplify_status
1550
gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
1551
{
1552
  tree switch_expr = *expr_p;
1553
  gimple_seq switch_body_seq = NULL;
1554
  enum gimplify_status ret;
1555
 
1556
  ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
1557
                       fb_rvalue);
1558
  if (ret == GS_ERROR || ret == GS_UNHANDLED)
1559
    return ret;
1560
 
1561
  if (SWITCH_BODY (switch_expr))
1562
    {
1563
      VEC (tree,heap) *labels;
1564
      VEC (tree,heap) *saved_labels;
1565
      tree default_case = NULL_TREE;
1566
      size_t i, len;
1567
      gimple gimple_switch;
1568
 
1569
      /* If someone can be bothered to fill in the labels, they can
1570
         be bothered to null out the body too.  */
1571
      gcc_assert (!SWITCH_LABELS (switch_expr));
1572
 
1573
      /* save old labels, get new ones from body, then restore the old
1574
         labels.  Save all the things from the switch body to append after.  */
1575
      saved_labels = gimplify_ctxp->case_labels;
1576
      gimplify_ctxp->case_labels = VEC_alloc (tree, heap, 8);
1577
 
1578
      gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
1579
      labels = gimplify_ctxp->case_labels;
1580
      gimplify_ctxp->case_labels = saved_labels;
1581
 
1582
      i = 0;
1583
      while (i < VEC_length (tree, labels))
1584
        {
1585
          tree elt = VEC_index (tree, labels, i);
1586
          tree low = CASE_LOW (elt);
1587
          bool remove_element = FALSE;
1588
 
1589
          if (low)
1590
            {
1591
              /* Discard empty ranges.  */
1592
              tree high = CASE_HIGH (elt);
1593
              if (high && tree_int_cst_lt (high, low))
1594
                remove_element = TRUE;
1595
            }
1596
          else
1597
            {
1598
              /* The default case must be the last label in the list.  */
1599
              gcc_assert (!default_case);
1600
              default_case = elt;
1601
              remove_element = TRUE;
1602
            }
1603
 
1604
          if (remove_element)
1605
            VEC_ordered_remove (tree, labels, i);
1606
          else
1607
            i++;
1608
        }
1609
      len = i;
1610
 
1611
      if (!VEC_empty (tree, labels))
1612
        sort_case_labels (labels);
1613
 
1614
      if (!default_case)
1615
        {
1616
          tree type = TREE_TYPE (switch_expr);
1617
 
1618
          /* If the switch has no default label, add one, so that we jump
1619
             around the switch body.  If the labels already cover the whole
1620
             range of type, add the default label pointing to one of the
1621
             existing labels.  */
1622
          if (type == void_type_node)
1623
            type = TREE_TYPE (SWITCH_COND (switch_expr));
1624
          if (len
1625
              && INTEGRAL_TYPE_P (type)
1626
              && TYPE_MIN_VALUE (type)
1627
              && TYPE_MAX_VALUE (type)
1628
              && tree_int_cst_equal (CASE_LOW (VEC_index (tree, labels, 0)),
1629
                                     TYPE_MIN_VALUE (type)))
1630
            {
1631
              tree low, high = CASE_HIGH (VEC_index (tree, labels, len - 1));
1632
              if (!high)
1633
                high = CASE_LOW (VEC_index (tree, labels, len - 1));
1634
              if (tree_int_cst_equal (high, TYPE_MAX_VALUE (type)))
1635
                {
1636
                  for (i = 1; i < len; i++)
1637
                    {
1638
                      high = CASE_LOW (VEC_index (tree, labels, i));
1639
                      low = CASE_HIGH (VEC_index (tree, labels, i - 1));
1640
                      if (!low)
1641
                        low = CASE_LOW (VEC_index (tree, labels, i - 1));
1642
                      if ((TREE_INT_CST_LOW (low) + 1
1643
                           != TREE_INT_CST_LOW (high))
1644
                          || (TREE_INT_CST_HIGH (low)
1645
                              + (TREE_INT_CST_LOW (high) == 0)
1646
                              != TREE_INT_CST_HIGH (high)))
1647
                        break;
1648
                    }
1649
                  if (i == len)
1650
                    {
1651
                      tree label = CASE_LABEL (VEC_index (tree, labels, 0));
1652
                      default_case = build_case_label (NULL_TREE, NULL_TREE,
1653
                                                       label);
1654
                    }
1655
                }
1656
            }
1657
 
1658
          if (!default_case)
1659
            {
1660
              gimple new_default;
1661
 
1662
              default_case
1663
                = build_case_label (NULL_TREE, NULL_TREE,
1664
                                    create_artificial_label (UNKNOWN_LOCATION));
1665
              new_default = gimple_build_label (CASE_LABEL (default_case));
1666
              gimplify_seq_add_stmt (&switch_body_seq, new_default);
1667
            }
1668
        }
1669
 
1670
      gimple_switch = gimple_build_switch_vec (SWITCH_COND (switch_expr),
1671
                                               default_case, labels);
1672
      gimplify_seq_add_stmt (pre_p, gimple_switch);
1673
      gimplify_seq_add_seq (pre_p, switch_body_seq);
1674
      VEC_free(tree, heap, labels);
1675
    }
1676
  else
1677
    gcc_assert (SWITCH_LABELS (switch_expr));
1678
 
1679
  return GS_ALL_DONE;
1680
}
1681
 
1682
/* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P.  */
1683
 
1684
static enum gimplify_status
1685
gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
1686
{
1687
  struct gimplify_ctx *ctxp;
1688
  gimple gimple_label;
1689
 
1690
  /* Invalid OpenMP programs can play Duff's Device type games with
1691
     #pragma omp parallel.  At least in the C front end, we don't
1692
     detect such invalid branches until after gimplification.  */
1693
  for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
1694
    if (ctxp->case_labels)
1695
      break;
1696
 
1697
  gimple_label = gimple_build_label (CASE_LABEL (*expr_p));
1698
  VEC_safe_push (tree, heap, ctxp->case_labels, *expr_p);
1699
  gimplify_seq_add_stmt (pre_p, gimple_label);
1700
 
1701
  return GS_ALL_DONE;
1702
}
1703
 
1704
/* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
1705
   if necessary.  */
1706
 
1707
tree
1708
build_and_jump (tree *label_p)
1709
{
1710
  if (label_p == NULL)
1711
    /* If there's nowhere to jump, just fall through.  */
1712
    return NULL_TREE;
1713
 
1714
  if (*label_p == NULL_TREE)
1715
    {
1716
      tree label = create_artificial_label (UNKNOWN_LOCATION);
1717
      *label_p = label;
1718
    }
1719
 
1720
  return build1 (GOTO_EXPR, void_type_node, *label_p);
1721
}
1722
 
1723
/* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
1724
   This also involves building a label to jump to and communicating it to
1725
   gimplify_loop_expr through gimplify_ctxp->exit_label.  */
1726
 
1727
static enum gimplify_status
1728
gimplify_exit_expr (tree *expr_p)
1729
{
1730
  tree cond = TREE_OPERAND (*expr_p, 0);
1731
  tree expr;
1732
 
1733
  expr = build_and_jump (&gimplify_ctxp->exit_label);
1734
  expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
1735
  *expr_p = expr;
1736
 
1737
  return GS_OK;
1738
}
1739
 
1740
/* A helper function to be called via walk_tree.  Mark all labels under *TP
1741
   as being forced.  To be called for DECL_INITIAL of static variables.  */
1742
 
1743
tree
1744
force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1745
{
1746
  if (TYPE_P (*tp))
1747
    *walk_subtrees = 0;
1748
  if (TREE_CODE (*tp) == LABEL_DECL)
1749
    FORCED_LABEL (*tp) = 1;
1750
 
1751
  return NULL_TREE;
1752
}
1753
 
1754
/* *EXPR_P is a COMPONENT_REF being used as an rvalue.  If its type is
1755
   different from its canonical type, wrap the whole thing inside a
1756
   NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
1757
   type.
1758
 
1759
   The canonical type of a COMPONENT_REF is the type of the field being
1760
   referenced--unless the field is a bit-field which can be read directly
1761
   in a smaller mode, in which case the canonical type is the
1762
   sign-appropriate type corresponding to that mode.  */
1763
 
1764
static void
1765
canonicalize_component_ref (tree *expr_p)
1766
{
1767
  tree expr = *expr_p;
1768
  tree type;
1769
 
1770
  gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
1771
 
1772
  if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
1773
    type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
1774
  else
1775
    type = TREE_TYPE (TREE_OPERAND (expr, 1));
1776
 
1777
  /* One could argue that all the stuff below is not necessary for
1778
     the non-bitfield case and declare it a FE error if type
1779
     adjustment would be needed.  */
1780
  if (TREE_TYPE (expr) != type)
1781
    {
1782
#ifdef ENABLE_TYPES_CHECKING
1783
      tree old_type = TREE_TYPE (expr);
1784
#endif
1785
      int type_quals;
1786
 
1787
      /* We need to preserve qualifiers and propagate them from
1788
         operand 0.  */
1789
      type_quals = TYPE_QUALS (type)
1790
        | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
1791
      if (TYPE_QUALS (type) != type_quals)
1792
        type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
1793
 
1794
      /* Set the type of the COMPONENT_REF to the underlying type.  */
1795
      TREE_TYPE (expr) = type;
1796
 
1797
#ifdef ENABLE_TYPES_CHECKING
1798
      /* It is now a FE error, if the conversion from the canonical
1799
         type to the original expression type is not useless.  */
1800
      gcc_assert (useless_type_conversion_p (old_type, type));
1801
#endif
1802
    }
1803
}
1804
 
1805
/* If a NOP conversion is changing a pointer to array of foo to a pointer
1806
   to foo, embed that change in the ADDR_EXPR by converting
1807
      T array[U];
1808
      (T *)&array
1809
   ==>
1810
      &array[L]
1811
   where L is the lower bound.  For simplicity, only do this for constant
1812
   lower bound.
1813
   The constraint is that the type of &array[L] is trivially convertible
1814
   to T *.  */
1815
 
1816
static void
1817
canonicalize_addr_expr (tree *expr_p)
1818
{
1819
  tree expr = *expr_p;
1820
  tree addr_expr = TREE_OPERAND (expr, 0);
1821
  tree datype, ddatype, pddatype;
1822
 
1823
  /* We simplify only conversions from an ADDR_EXPR to a pointer type.  */
1824
  if (!POINTER_TYPE_P (TREE_TYPE (expr))
1825
      || TREE_CODE (addr_expr) != ADDR_EXPR)
1826
    return;
1827
 
1828
  /* The addr_expr type should be a pointer to an array.  */
1829
  datype = TREE_TYPE (TREE_TYPE (addr_expr));
1830
  if (TREE_CODE (datype) != ARRAY_TYPE)
1831
    return;
1832
 
1833
  /* The pointer to element type shall be trivially convertible to
1834
     the expression pointer type.  */
1835
  ddatype = TREE_TYPE (datype);
1836
  pddatype = build_pointer_type (ddatype);
1837
  if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
1838
                                  pddatype))
1839
    return;
1840
 
1841
  /* The lower bound and element sizes must be constant.  */
1842
  if (!TYPE_SIZE_UNIT (ddatype)
1843
      || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
1844
      || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
1845
      || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
1846
    return;
1847
 
1848
  /* All checks succeeded.  Build a new node to merge the cast.  */
1849
  *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
1850
                    TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
1851
                    NULL_TREE, NULL_TREE);
1852
  *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
1853
 
1854
  /* We can have stripped a required restrict qualifier above.  */
1855
  if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
1856
    *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
1857
}
1858
 
1859
/* *EXPR_P is a NOP_EXPR or CONVERT_EXPR.  Remove it and/or other conversions
1860
   underneath as appropriate.  */
1861
 
1862
static enum gimplify_status
1863
gimplify_conversion (tree *expr_p)
1864
{
1865
  location_t loc = EXPR_LOCATION (*expr_p);
1866
  gcc_assert (CONVERT_EXPR_P (*expr_p));
1867
 
1868
  /* Then strip away all but the outermost conversion.  */
1869
  STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
1870
 
1871
  /* And remove the outermost conversion if it's useless.  */
1872
  if (tree_ssa_useless_type_conversion (*expr_p))
1873
    *expr_p = TREE_OPERAND (*expr_p, 0);
1874
 
1875
  /* If we still have a conversion at the toplevel,
1876
     then canonicalize some constructs.  */
1877
  if (CONVERT_EXPR_P (*expr_p))
1878
    {
1879
      tree sub = TREE_OPERAND (*expr_p, 0);
1880
 
1881
      /* If a NOP conversion is changing the type of a COMPONENT_REF
1882
         expression, then canonicalize its type now in order to expose more
1883
         redundant conversions.  */
1884
      if (TREE_CODE (sub) == COMPONENT_REF)
1885
        canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
1886
 
1887
      /* If a NOP conversion is changing a pointer to array of foo
1888
         to a pointer to foo, embed that change in the ADDR_EXPR.  */
1889
      else if (TREE_CODE (sub) == ADDR_EXPR)
1890
        canonicalize_addr_expr (expr_p);
1891
    }
1892
 
1893
  /* If we have a conversion to a non-register type force the
1894
     use of a VIEW_CONVERT_EXPR instead.  */
1895
  if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
1896
    *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
1897
                               TREE_OPERAND (*expr_p, 0));
1898
 
1899
  return GS_OK;
1900
}
1901
 
1902
/* Nonlocal VLAs seen in the current function.  */
1903
static struct pointer_set_t *nonlocal_vlas;
1904
 
1905
/* Gimplify a VAR_DECL or PARM_DECL.  Return GS_OK if we expanded a
1906
   DECL_VALUE_EXPR, and it's worth re-examining things.  */
1907
 
1908
static enum gimplify_status
1909
gimplify_var_or_parm_decl (tree *expr_p)
1910
{
1911
  tree decl = *expr_p;
1912
 
1913
  /* ??? If this is a local variable, and it has not been seen in any
1914
     outer BIND_EXPR, then it's probably the result of a duplicate
1915
     declaration, for which we've already issued an error.  It would
1916
     be really nice if the front end wouldn't leak these at all.
1917
     Currently the only known culprit is C++ destructors, as seen
1918
     in g++.old-deja/g++.jason/binding.C.  */
1919
  if (TREE_CODE (decl) == VAR_DECL
1920
      && !DECL_SEEN_IN_BIND_EXPR_P (decl)
1921
      && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
1922
      && decl_function_context (decl) == current_function_decl)
1923
    {
1924
      gcc_assert (seen_error ());
1925
      return GS_ERROR;
1926
    }
1927
 
1928
  /* When within an OpenMP context, notice uses of variables.  */
1929
  if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
1930
    return GS_ALL_DONE;
1931
 
1932
  /* If the decl is an alias for another expression, substitute it now.  */
1933
  if (DECL_HAS_VALUE_EXPR_P (decl))
1934
    {
1935
      tree value_expr = DECL_VALUE_EXPR (decl);
1936
 
1937
      /* For referenced nonlocal VLAs add a decl for debugging purposes
1938
         to the current function.  */
1939
      if (TREE_CODE (decl) == VAR_DECL
1940
          && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1941
          && nonlocal_vlas != NULL
1942
          && TREE_CODE (value_expr) == INDIRECT_REF
1943
          && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
1944
          && decl_function_context (decl) != current_function_decl)
1945
        {
1946
          struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1947
          while (ctx && ctx->region_type == ORT_WORKSHARE)
1948
            ctx = ctx->outer_context;
1949
          if (!ctx && !pointer_set_insert (nonlocal_vlas, decl))
1950
            {
1951
              tree copy = copy_node (decl), block;
1952
 
1953
              lang_hooks.dup_lang_specific_decl (copy);
1954
              SET_DECL_RTL (copy, 0);
1955
              TREE_USED (copy) = 1;
1956
              block = DECL_INITIAL (current_function_decl);
1957
              DECL_CHAIN (copy) = BLOCK_VARS (block);
1958
              BLOCK_VARS (block) = copy;
1959
              SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
1960
              DECL_HAS_VALUE_EXPR_P (copy) = 1;
1961
            }
1962
        }
1963
 
1964
      *expr_p = unshare_expr (value_expr);
1965
      return GS_OK;
1966
    }
1967
 
1968
  return GS_ALL_DONE;
1969
}
1970
 
1971
/* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
1972
   node *EXPR_P.
1973
 
1974
      compound_lval
1975
              : min_lval '[' val ']'
1976
              | min_lval '.' ID
1977
              | compound_lval '[' val ']'
1978
              | compound_lval '.' ID
1979
 
1980
   This is not part of the original SIMPLE definition, which separates
1981
   array and member references, but it seems reasonable to handle them
1982
   together.  Also, this way we don't run into problems with union
1983
   aliasing; gcc requires that for accesses through a union to alias, the
1984
   union reference must be explicit, which was not always the case when we
1985
   were splitting up array and member refs.
1986
 
1987
   PRE_P points to the sequence where side effects that must happen before
1988
     *EXPR_P should be stored.
1989
 
1990
   POST_P points to the sequence where side effects that must happen after
1991
     *EXPR_P should be stored.  */
1992
 
1993
static enum gimplify_status
1994
gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
1995
                        fallback_t fallback)
1996
{
1997
  tree *p;
1998
  VEC(tree,heap) *stack;
1999
  enum gimplify_status ret = GS_ALL_DONE, tret;
2000
  int i;
2001
  location_t loc = EXPR_LOCATION (*expr_p);
2002
  tree expr = *expr_p;
2003
 
2004
  /* Create a stack of the subexpressions so later we can walk them in
2005
     order from inner to outer.  */
2006
  stack = VEC_alloc (tree, heap, 10);
2007
 
2008
  /* We can handle anything that get_inner_reference can deal with.  */
2009
  for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
2010
    {
2011
    restart:
2012
      /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs.  */
2013
      if (TREE_CODE (*p) == INDIRECT_REF)
2014
        *p = fold_indirect_ref_loc (loc, *p);
2015
 
2016
      if (handled_component_p (*p))
2017
        ;
2018
      /* Expand DECL_VALUE_EXPR now.  In some cases that may expose
2019
         additional COMPONENT_REFs.  */
2020
      else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL)
2021
               && gimplify_var_or_parm_decl (p) == GS_OK)
2022
        goto restart;
2023
      else
2024
        break;
2025
 
2026
      VEC_safe_push (tree, heap, stack, *p);
2027
    }
2028
 
2029
  gcc_assert (VEC_length (tree, stack));
2030
 
2031
  /* Now STACK is a stack of pointers to all the refs we've walked through
2032
     and P points to the innermost expression.
2033
 
2034
     Java requires that we elaborated nodes in source order.  That
2035
     means we must gimplify the inner expression followed by each of
2036
     the indices, in order.  But we can't gimplify the inner
2037
     expression until we deal with any variable bounds, sizes, or
2038
     positions in order to deal with PLACEHOLDER_EXPRs.
2039
 
2040
     So we do this in three steps.  First we deal with the annotations
2041
     for any variables in the components, then we gimplify the base,
2042
     then we gimplify any indices, from left to right.  */
2043
  for (i = VEC_length (tree, stack) - 1; i >= 0; i--)
2044
    {
2045
      tree t = VEC_index (tree, stack, i);
2046
 
2047
      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2048
        {
2049
          /* Gimplify the low bound and element type size and put them into
2050
             the ARRAY_REF.  If these values are set, they have already been
2051
             gimplified.  */
2052
          if (TREE_OPERAND (t, 2) == NULL_TREE)
2053
            {
2054
              tree low = unshare_expr (array_ref_low_bound (t));
2055
              if (!is_gimple_min_invariant (low))
2056
                {
2057
                  TREE_OPERAND (t, 2) = low;
2058
                  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2059
                                        post_p, is_gimple_reg,
2060
                                        fb_rvalue);
2061
                  ret = MIN (ret, tret);
2062
                }
2063
            }
2064
          else
2065
            {
2066
              tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2067
                                    is_gimple_reg, fb_rvalue);
2068
              ret = MIN (ret, tret);
2069
            }
2070
 
2071
          if (TREE_OPERAND (t, 3) == NULL_TREE)
2072
            {
2073
              tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
2074
              tree elmt_size = unshare_expr (array_ref_element_size (t));
2075
              tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
2076
 
2077
              /* Divide the element size by the alignment of the element
2078
                 type (above).  */
2079
              elmt_size
2080
                = size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);
2081
 
2082
              if (!is_gimple_min_invariant (elmt_size))
2083
                {
2084
                  TREE_OPERAND (t, 3) = elmt_size;
2085
                  tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
2086
                                        post_p, is_gimple_reg,
2087
                                        fb_rvalue);
2088
                  ret = MIN (ret, tret);
2089
                }
2090
            }
2091
          else
2092
            {
2093
              tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
2094
                                    is_gimple_reg, fb_rvalue);
2095
              ret = MIN (ret, tret);
2096
            }
2097
        }
2098
      else if (TREE_CODE (t) == COMPONENT_REF)
2099
        {
2100
          /* Set the field offset into T and gimplify it.  */
2101
          if (TREE_OPERAND (t, 2) == NULL_TREE)
2102
            {
2103
              tree offset = unshare_expr (component_ref_field_offset (t));
2104
              tree field = TREE_OPERAND (t, 1);
2105
              tree factor
2106
                = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
2107
 
2108
              /* Divide the offset by its alignment.  */
2109
              offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);
2110
 
2111
              if (!is_gimple_min_invariant (offset))
2112
                {
2113
                  TREE_OPERAND (t, 2) = offset;
2114
                  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2115
                                        post_p, is_gimple_reg,
2116
                                        fb_rvalue);
2117
                  ret = MIN (ret, tret);
2118
                }
2119
            }
2120
          else
2121
            {
2122
              tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2123
                                    is_gimple_reg, fb_rvalue);
2124
              ret = MIN (ret, tret);
2125
            }
2126
        }
2127
    }
2128
 
2129
  /* Step 2 is to gimplify the base expression.  Make sure lvalue is set
2130
     so as to match the min_lval predicate.  Failure to do so may result
2131
     in the creation of large aggregate temporaries.  */
2132
  tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
2133
                        fallback | fb_lvalue);
2134
  ret = MIN (ret, tret);
2135
 
2136
  /* And finally, the indices and operands to BIT_FIELD_REF.  During this
2137
     loop we also remove any useless conversions.  */
2138
  for (; VEC_length (tree, stack) > 0; )
2139
    {
2140
      tree t = VEC_pop (tree, stack);
2141
 
2142
      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2143
        {
2144
          /* Gimplify the dimension.  */
2145
          if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
2146
            {
2147
              tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
2148
                                    is_gimple_val, fb_rvalue);
2149
              ret = MIN (ret, tret);
2150
            }
2151
        }
2152
      else if (TREE_CODE (t) == BIT_FIELD_REF)
2153
        {
2154
          tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
2155
                                is_gimple_val, fb_rvalue);
2156
          ret = MIN (ret, tret);
2157
          tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2158
                                is_gimple_val, fb_rvalue);
2159
          ret = MIN (ret, tret);
2160
        }
2161
 
2162
      STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
2163
 
2164
      /* The innermost expression P may have originally had
2165
         TREE_SIDE_EFFECTS set which would have caused all the outer
2166
         expressions in *EXPR_P leading to P to also have had
2167
         TREE_SIDE_EFFECTS set.  */
2168
      recalculate_side_effects (t);
2169
    }
2170
 
2171
  /* If the outermost expression is a COMPONENT_REF, canonicalize its type.  */
2172
  if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
2173
    {
2174
      canonicalize_component_ref (expr_p);
2175
    }
2176
 
2177
  VEC_free (tree, heap, stack);
2178
 
2179
  gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
2180
 
2181
  return ret;
2182
}
2183
 
2184
/*  Gimplify the self modifying expression pointed to by EXPR_P
2185
    (++, --, +=, -=).
2186
 
2187
    PRE_P points to the list where side effects that must happen before
2188
        *EXPR_P should be stored.
2189
 
2190
    POST_P points to the list where side effects that must happen after
2191
        *EXPR_P should be stored.
2192
 
2193
    WANT_VALUE is nonzero iff we want to use the value of this expression
2194
        in another expression.  */
2195
 
2196
static enum gimplify_status
2197
gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2198
                        bool want_value)
2199
{
2200
  enum tree_code code;
2201
  tree lhs, lvalue, rhs, t1;
2202
  gimple_seq post = NULL, *orig_post_p = post_p;
2203
  bool postfix;
2204
  enum tree_code arith_code;
2205
  enum gimplify_status ret;
2206
  location_t loc = EXPR_LOCATION (*expr_p);
2207
 
2208
  code = TREE_CODE (*expr_p);
2209
 
2210
  gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
2211
              || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
2212
 
2213
  /* Prefix or postfix?  */
2214
  if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
2215
    /* Faster to treat as prefix if result is not used.  */
2216
    postfix = want_value;
2217
  else
2218
    postfix = false;
2219
 
2220
  /* For postfix, make sure the inner expression's post side effects
2221
     are executed after side effects from this expression.  */
2222
  if (postfix)
2223
    post_p = &post;
2224
 
2225
  /* Add or subtract?  */
2226
  if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
2227
    arith_code = PLUS_EXPR;
2228
  else
2229
    arith_code = MINUS_EXPR;
2230
 
2231
  /* Gimplify the LHS into a GIMPLE lvalue.  */
2232
  lvalue = TREE_OPERAND (*expr_p, 0);
2233
  ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
2234
  if (ret == GS_ERROR)
2235
    return ret;
2236
 
2237
  /* Extract the operands to the arithmetic operation.  */
2238
  lhs = lvalue;
2239
  rhs = TREE_OPERAND (*expr_p, 1);
2240
 
2241
  /* For postfix operator, we evaluate the LHS to an rvalue and then use
2242
     that as the result value and in the postqueue operation.  We also
2243
     make sure to make lvalue a minimal lval, see
2244
     gcc.c-torture/execute/20040313-1.c for an example where this matters.  */
2245
  if (postfix)
2246
    {
2247
      if (!is_gimple_min_lval (lvalue))
2248
        {
2249
          mark_addressable (lvalue);
2250
          lvalue = build_fold_addr_expr_loc (input_location, lvalue);
2251
          gimplify_expr (&lvalue, pre_p, post_p, is_gimple_val, fb_rvalue);
2252
          lvalue = build_fold_indirect_ref_loc (input_location, lvalue);
2253
        }
2254
      ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
2255
      if (ret == GS_ERROR)
2256
        return ret;
2257
    }
2258
 
2259
  /* For POINTERs increment, use POINTER_PLUS_EXPR.  */
2260
  if (POINTER_TYPE_P (TREE_TYPE (lhs)))
2261
    {
2262
      rhs = convert_to_ptrofftype_loc (loc, rhs);
2263
      if (arith_code == MINUS_EXPR)
2264
        rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
2265
      arith_code = POINTER_PLUS_EXPR;
2266
    }
2267
 
2268
  t1 = build2 (arith_code, TREE_TYPE (*expr_p), lhs, rhs);
2269
 
2270
  if (postfix)
2271
    {
2272
      gimplify_assign (lvalue, t1, orig_post_p);
2273
      gimplify_seq_add_seq (orig_post_p, post);
2274
      *expr_p = lhs;
2275
      return GS_ALL_DONE;
2276
    }
2277
  else
2278
    {
2279
      *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
2280
      return GS_OK;
2281
    }
2282
}
2283
 
2284
/* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR.  */
2285
 
2286
static void
2287
maybe_with_size_expr (tree *expr_p)
2288
{
2289
  tree expr = *expr_p;
2290
  tree type = TREE_TYPE (expr);
2291
  tree size;
2292
 
2293
  /* If we've already wrapped this or the type is error_mark_node, we can't do
2294
     anything.  */
2295
  if (TREE_CODE (expr) == WITH_SIZE_EXPR
2296
      || type == error_mark_node)
2297
    return;
2298
 
2299
  /* If the size isn't known or is a constant, we have nothing to do.  */
2300
  size = TYPE_SIZE_UNIT (type);
2301
  if (!size || TREE_CODE (size) == INTEGER_CST)
2302
    return;
2303
 
2304
  /* Otherwise, make a WITH_SIZE_EXPR.  */
2305
  size = unshare_expr (size);
2306
  size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
2307
  *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
2308
}
2309
 
2310
/* Helper for gimplify_call_expr.  Gimplify a single argument *ARG_P
2311
   Store any side-effects in PRE_P.  CALL_LOCATION is the location of
2312
   the CALL_EXPR.  */
2313
 
2314
static enum gimplify_status
2315
gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location)
2316
{
2317
  bool (*test) (tree);
2318
  fallback_t fb;
2319
 
2320
  /* In general, we allow lvalues for function arguments to avoid
2321
     extra overhead of copying large aggregates out of even larger
2322
     aggregates into temporaries only to copy the temporaries to
2323
     the argument list.  Make optimizers happy by pulling out to
2324
     temporaries those types that fit in registers.  */
2325
  if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
2326
    test = is_gimple_val, fb = fb_rvalue;
2327
  else
2328
    {
2329
      test = is_gimple_lvalue, fb = fb_either;
2330
      /* Also strip a TARGET_EXPR that would force an extra copy.  */
2331
      if (TREE_CODE (*arg_p) == TARGET_EXPR)
2332
        {
2333
          tree init = TARGET_EXPR_INITIAL (*arg_p);
2334
          if (init
2335
              && !VOID_TYPE_P (TREE_TYPE (init)))
2336
            *arg_p = init;
2337
        }
2338
    }
2339
 
2340
  /* If this is a variable sized type, we must remember the size.  */
2341
  maybe_with_size_expr (arg_p);
2342
 
2343
  /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c.  */
2344
  /* Make sure arguments have the same location as the function call
2345
     itself.  */
2346
  protected_set_expr_location (*arg_p, call_location);
2347
 
2348
  /* There is a sequence point before a function call.  Side effects in
2349
     the argument list must occur before the actual call. So, when
2350
     gimplifying arguments, force gimplify_expr to use an internal
2351
     post queue which is then appended to the end of PRE_P.  */
2352
  return gimplify_expr (arg_p, pre_p, NULL, test, fb);
2353
}
2354
 
2355
/* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
2356
   WANT_VALUE is true if the result of the call is desired.  */
2357
 
2358
static enum gimplify_status
2359
gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
2360
{
2361
  tree fndecl, parms, p, fnptrtype;
2362
  enum gimplify_status ret;
2363
  int i, nargs;
2364
  gimple call;
2365
  bool builtin_va_start_p = FALSE;
2366
  location_t loc = EXPR_LOCATION (*expr_p);
2367
 
2368
  gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
2369
 
2370
  /* For reliable diagnostics during inlining, it is necessary that
2371
     every call_expr be annotated with file and line.  */
2372
  if (! EXPR_HAS_LOCATION (*expr_p))
2373
    SET_EXPR_LOCATION (*expr_p, input_location);
2374
 
2375
  /* This may be a call to a builtin function.
2376
 
2377
     Builtin function calls may be transformed into different
2378
     (and more efficient) builtin function calls under certain
2379
     circumstances.  Unfortunately, gimplification can muck things
2380
     up enough that the builtin expanders are not aware that certain
2381
     transformations are still valid.
2382
 
2383
     So we attempt transformation/gimplification of the call before
2384
     we gimplify the CALL_EXPR.  At this time we do not manage to
2385
     transform all calls in the same manner as the expanders do, but
2386
     we do transform most of them.  */
2387
  fndecl = get_callee_fndecl (*expr_p);
2388
  if (fndecl && DECL_BUILT_IN (fndecl))
2389
    {
2390
      tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
2391
 
2392
      if (new_tree && new_tree != *expr_p)
2393
        {
2394
          /* There was a transformation of this call which computes the
2395
             same value, but in a more efficient way.  Return and try
2396
             again.  */
2397
          *expr_p = new_tree;
2398
          return GS_OK;
2399
        }
2400
 
2401
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2402
          && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_VA_START)
2403
        {
2404
          builtin_va_start_p = TRUE;
2405
          if (call_expr_nargs (*expr_p) < 2)
2406
            {
2407
              error ("too few arguments to function %<va_start%>");
2408
              *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2409
              return GS_OK;
2410
            }
2411
 
2412
          if (fold_builtin_next_arg (*expr_p, true))
2413
            {
2414
              *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2415
              return GS_OK;
2416
            }
2417
        }
2418
    }
2419
 
2420
  /* Remember the original function pointer type.  */
2421
  fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
2422
 
2423
  /* There is a sequence point before the call, so any side effects in
2424
     the calling expression must occur before the actual call.  Force
2425
     gimplify_expr to use an internal post queue.  */
2426
  ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
2427
                       is_gimple_call_addr, fb_rvalue);
2428
 
2429
  nargs = call_expr_nargs (*expr_p);
2430
 
2431
  /* Get argument types for verification.  */
2432
  fndecl = get_callee_fndecl (*expr_p);
2433
  parms = NULL_TREE;
2434
  if (fndecl)
2435
    parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
2436
  else if (POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_FN (*expr_p))))
2437
    parms = TYPE_ARG_TYPES (TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (*expr_p))));
2438
 
2439
  if (fndecl && DECL_ARGUMENTS (fndecl))
2440
    p = DECL_ARGUMENTS (fndecl);
2441
  else if (parms)
2442
    p = parms;
2443
  else
2444
    p = NULL_TREE;
2445
  for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
2446
    ;
2447
 
2448
  /* If the last argument is __builtin_va_arg_pack () and it is not
2449
     passed as a named argument, decrease the number of CALL_EXPR
2450
     arguments and set instead the CALL_EXPR_VA_ARG_PACK flag.  */
2451
  if (!p
2452
      && i < nargs
2453
      && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
2454
    {
2455
      tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
2456
      tree last_arg_fndecl = get_callee_fndecl (last_arg);
2457
 
2458
      if (last_arg_fndecl
2459
          && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
2460
          && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
2461
          && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
2462
        {
2463
          tree call = *expr_p;
2464
 
2465
          --nargs;
2466
          *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
2467
                                          CALL_EXPR_FN (call),
2468
                                          nargs, CALL_EXPR_ARGP (call));
2469
 
2470
          /* Copy all CALL_EXPR flags, location and block, except
2471
             CALL_EXPR_VA_ARG_PACK flag.  */
2472
          CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
2473
          CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
2474
          CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
2475
            = CALL_EXPR_RETURN_SLOT_OPT (call);
2476
          CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
2477
          SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
2478
          TREE_BLOCK (*expr_p) = TREE_BLOCK (call);
2479
 
2480
          /* Set CALL_EXPR_VA_ARG_PACK.  */
2481
          CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
2482
        }
2483
    }
2484
 
2485
  /* Finally, gimplify the function arguments.  */
2486
  if (nargs > 0)
2487
    {
2488
      for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
2489
           PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
2490
           PUSH_ARGS_REVERSED ? i-- : i++)
2491
        {
2492
          enum gimplify_status t;
2493
 
2494
          /* Avoid gimplifying the second argument to va_start, which needs to
2495
             be the plain PARM_DECL.  */
2496
          if ((i != 1) || !builtin_va_start_p)
2497
            {
2498
              t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
2499
                                EXPR_LOCATION (*expr_p));
2500
 
2501
              if (t == GS_ERROR)
2502
                ret = GS_ERROR;
2503
            }
2504
        }
2505
    }
2506
 
2507
  /* Verify the function result.  */
2508
  if (want_value && fndecl
2509
      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
2510
    {
2511
      error_at (loc, "using result of function returning %<void%>");
2512
      ret = GS_ERROR;
2513
    }
2514
 
2515
  /* Try this again in case gimplification exposed something.  */
2516
  if (ret != GS_ERROR)
2517
    {
2518
      tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
2519
 
2520
      if (new_tree && new_tree != *expr_p)
2521
        {
2522
          /* There was a transformation of this call which computes the
2523
             same value, but in a more efficient way.  Return and try
2524
             again.  */
2525
          *expr_p = new_tree;
2526
          return GS_OK;
2527
        }
2528
    }
2529
  else
2530
    {
2531
      *expr_p = error_mark_node;
2532
      return GS_ERROR;
2533
    }
2534
 
2535
  /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
2536
     decl.  This allows us to eliminate redundant or useless
2537
     calls to "const" functions.  */
2538
  if (TREE_CODE (*expr_p) == CALL_EXPR)
2539
    {
2540
      int flags = call_expr_flags (*expr_p);
2541
      if (flags & (ECF_CONST | ECF_PURE)
2542
          /* An infinite loop is considered a side effect.  */
2543
          && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
2544
        TREE_SIDE_EFFECTS (*expr_p) = 0;
2545
    }
2546
 
2547
  /* If the value is not needed by the caller, emit a new GIMPLE_CALL
2548
     and clear *EXPR_P.  Otherwise, leave *EXPR_P in its gimplified
2549
     form and delegate the creation of a GIMPLE_CALL to
2550
     gimplify_modify_expr.  This is always possible because when
2551
     WANT_VALUE is true, the caller wants the result of this call into
2552
     a temporary, which means that we will emit an INIT_EXPR in
2553
     internal_get_tmp_var which will then be handled by
2554
     gimplify_modify_expr.  */
2555
  if (!want_value)
2556
    {
2557
      /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
2558
         have to do is replicate it as a GIMPLE_CALL tuple.  */
2559
      gimple_stmt_iterator gsi;
2560
      call = gimple_build_call_from_tree (*expr_p);
2561
      gimple_call_set_fntype (call, TREE_TYPE (fnptrtype));
2562
      gimplify_seq_add_stmt (pre_p, call);
2563
      gsi = gsi_last (*pre_p);
2564
      fold_stmt (&gsi);
2565
      *expr_p = NULL_TREE;
2566
    }
2567
  else
2568
    /* Remember the original function type.  */
2569
    CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
2570
                                     CALL_EXPR_FN (*expr_p));
2571
 
2572
  return ret;
2573
}
2574
 
2575
/* Handle shortcut semantics in the predicate operand of a COND_EXPR by
2576
   rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
2577
 
2578
   TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
2579
   condition is true or false, respectively.  If null, we should generate
2580
   our own to skip over the evaluation of this specific expression.
2581
 
2582
   LOCUS is the source location of the COND_EXPR.
2583
 
2584
   This function is the tree equivalent of do_jump.
2585
 
2586
   shortcut_cond_r should only be called by shortcut_cond_expr.  */
2587
 
2588
static tree
2589
shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
2590
                 location_t locus)
2591
{
2592
  tree local_label = NULL_TREE;
2593
  tree t, expr = NULL;
2594
 
2595
  /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
2596
     retain the shortcut semantics.  Just insert the gotos here;
2597
     shortcut_cond_expr will append the real blocks later.  */
2598
  if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2599
    {
2600
      location_t new_locus;
2601
 
2602
      /* Turn if (a && b) into
2603
 
2604
         if (a); else goto no;
2605
         if (b) goto yes; else goto no;
2606
         (no:) */
2607
 
2608
      if (false_label_p == NULL)
2609
        false_label_p = &local_label;
2610
 
2611
      /* Keep the original source location on the first 'if'.  */
2612
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
2613
      append_to_statement_list (t, &expr);
2614
 
2615
      /* Set the source location of the && on the second 'if'.  */
2616
      new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2617
      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2618
                           new_locus);
2619
      append_to_statement_list (t, &expr);
2620
    }
2621
  else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2622
    {
2623
      location_t new_locus;
2624
 
2625
      /* Turn if (a || b) into
2626
 
2627
         if (a) goto yes;
2628
         if (b) goto yes; else goto no;
2629
         (yes:) */
2630
 
2631
      if (true_label_p == NULL)
2632
        true_label_p = &local_label;
2633
 
2634
      /* Keep the original source location on the first 'if'.  */
2635
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
2636
      append_to_statement_list (t, &expr);
2637
 
2638
      /* Set the source location of the || on the second 'if'.  */
2639
      new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2640
      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2641
                           new_locus);
2642
      append_to_statement_list (t, &expr);
2643
    }
2644
  else if (TREE_CODE (pred) == COND_EXPR
2645
           && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
2646
           && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
2647
    {
2648
      location_t new_locus;
2649
 
2650
      /* As long as we're messing with gotos, turn if (a ? b : c) into
2651
         if (a)
2652
           if (b) goto yes; else goto no;
2653
         else
2654
           if (c) goto yes; else goto no;
2655
 
2656
         Don't do this if one of the arms has void type, which can happen
2657
         in C++ when the arm is throw.  */
2658
 
2659
      /* Keep the original source location on the first 'if'.  Set the source
2660
         location of the ? on the second 'if'.  */
2661
      new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2662
      expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
2663
                     shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
2664
                                      false_label_p, locus),
2665
                     shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
2666
                                      false_label_p, new_locus));
2667
    }
2668
  else
2669
    {
2670
      expr = build3 (COND_EXPR, void_type_node, pred,
2671
                     build_and_jump (true_label_p),
2672
                     build_and_jump (false_label_p));
2673
      SET_EXPR_LOCATION (expr, locus);
2674
    }
2675
 
2676
  if (local_label)
2677
    {
2678
      t = build1 (LABEL_EXPR, void_type_node, local_label);
2679
      append_to_statement_list (t, &expr);
2680
    }
2681
 
2682
  return expr;
2683
}
2684
 
2685
/* Given a conditional expression EXPR with short-circuit boolean
2686
   predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
2687
   predicate appart into the equivalent sequence of conditionals.  */
2688
 
2689
static tree
2690
shortcut_cond_expr (tree expr)
2691
{
2692
  tree pred = TREE_OPERAND (expr, 0);
2693
  tree then_ = TREE_OPERAND (expr, 1);
2694
  tree else_ = TREE_OPERAND (expr, 2);
2695
  tree true_label, false_label, end_label, t;
2696
  tree *true_label_p;
2697
  tree *false_label_p;
2698
  bool emit_end, emit_false, jump_over_else;
2699
  bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
2700
  bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
2701
 
2702
  /* First do simple transformations.  */
2703
  if (!else_se)
2704
    {
2705
      /* If there is no 'else', turn
2706
           if (a && b) then c
2707
         into
2708
           if (a) if (b) then c.  */
2709
      while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2710
        {
2711
          /* Keep the original source location on the first 'if'.  */
2712
          location_t locus = EXPR_LOC_OR_HERE (expr);
2713
          TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2714
          /* Set the source location of the && on the second 'if'.  */
2715
          if (EXPR_HAS_LOCATION (pred))
2716
            SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
2717
          then_ = shortcut_cond_expr (expr);
2718
          then_se = then_ && TREE_SIDE_EFFECTS (then_);
2719
          pred = TREE_OPERAND (pred, 0);
2720
          expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
2721
          SET_EXPR_LOCATION (expr, locus);
2722
        }
2723
    }
2724
 
2725
  if (!then_se)
2726
    {
2727
      /* If there is no 'then', turn
2728
           if (a || b); else d
2729
         into
2730
           if (a); else if (b); else d.  */
2731
      while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2732
        {
2733
          /* Keep the original source location on the first 'if'.  */
2734
          location_t locus = EXPR_LOC_OR_HERE (expr);
2735
          TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2736
          /* Set the source location of the || on the second 'if'.  */
2737
          if (EXPR_HAS_LOCATION (pred))
2738
            SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
2739
          else_ = shortcut_cond_expr (expr);
2740
          else_se = else_ && TREE_SIDE_EFFECTS (else_);
2741
          pred = TREE_OPERAND (pred, 0);
2742
          expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
2743
          SET_EXPR_LOCATION (expr, locus);
2744
        }
2745
    }
2746
 
2747
  /* If we're done, great.  */
2748
  if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
2749
      && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
2750
    return expr;
2751
 
2752
  /* Otherwise we need to mess with gotos.  Change
2753
       if (a) c; else d;
2754
     to
2755
       if (a); else goto no;
2756
       c; goto end;
2757
       no: d; end:
2758
     and recursively gimplify the condition.  */
2759
 
2760
  true_label = false_label = end_label = NULL_TREE;
2761
 
2762
  /* If our arms just jump somewhere, hijack those labels so we don't
2763
     generate jumps to jumps.  */
2764
 
2765
  if (then_
2766
      && TREE_CODE (then_) == GOTO_EXPR
2767
      && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
2768
    {
2769
      true_label = GOTO_DESTINATION (then_);
2770
      then_ = NULL;
2771
      then_se = false;
2772
    }
2773
 
2774
  if (else_
2775
      && TREE_CODE (else_) == GOTO_EXPR
2776
      && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
2777
    {
2778
      false_label = GOTO_DESTINATION (else_);
2779
      else_ = NULL;
2780
      else_se = false;
2781
    }
2782
 
2783
  /* If we aren't hijacking a label for the 'then' branch, it falls through.  */
2784
  if (true_label)
2785
    true_label_p = &true_label;
2786
  else
2787
    true_label_p = NULL;
2788
 
2789
  /* The 'else' branch also needs a label if it contains interesting code.  */
2790
  if (false_label || else_se)
2791
    false_label_p = &false_label;
2792
  else
2793
    false_label_p = NULL;
2794
 
2795
  /* If there was nothing else in our arms, just forward the label(s).  */
2796
  if (!then_se && !else_se)
2797
    return shortcut_cond_r (pred, true_label_p, false_label_p,
2798
                            EXPR_LOC_OR_HERE (expr));
2799
 
2800
  /* If our last subexpression already has a terminal label, reuse it.  */
2801
  if (else_se)
2802
    t = expr_last (else_);
2803
  else if (then_se)
2804
    t = expr_last (then_);
2805
  else
2806
    t = NULL;
2807
  if (t && TREE_CODE (t) == LABEL_EXPR)
2808
    end_label = LABEL_EXPR_LABEL (t);
2809
 
2810
  /* If we don't care about jumping to the 'else' branch, jump to the end
2811
     if the condition is false.  */
2812
  if (!false_label_p)
2813
    false_label_p = &end_label;
2814
 
2815
  /* We only want to emit these labels if we aren't hijacking them.  */
2816
  emit_end = (end_label == NULL_TREE);
2817
  emit_false = (false_label == NULL_TREE);
2818
 
2819
  /* We only emit the jump over the else clause if we have to--if the
2820
     then clause may fall through.  Otherwise we can wind up with a
2821
     useless jump and a useless label at the end of gimplified code,
2822
     which will cause us to think that this conditional as a whole
2823
     falls through even if it doesn't.  If we then inline a function
2824
     which ends with such a condition, that can cause us to issue an
2825
     inappropriate warning about control reaching the end of a
2826
     non-void function.  */
2827
  jump_over_else = block_may_fallthru (then_);
2828
 
2829
  pred = shortcut_cond_r (pred, true_label_p, false_label_p,
2830
                          EXPR_LOC_OR_HERE (expr));
2831
 
2832
  expr = NULL;
2833
  append_to_statement_list (pred, &expr);
2834
 
2835
  append_to_statement_list (then_, &expr);
2836
  if (else_se)
2837
    {
2838
      if (jump_over_else)
2839
        {
2840
          tree last = expr_last (expr);
2841
          t = build_and_jump (&end_label);
2842
          if (EXPR_HAS_LOCATION (last))
2843
            SET_EXPR_LOCATION (t, EXPR_LOCATION (last));
2844
          append_to_statement_list (t, &expr);
2845
        }
2846
      if (emit_false)
2847
        {
2848
          t = build1 (LABEL_EXPR, void_type_node, false_label);
2849
          append_to_statement_list (t, &expr);
2850
        }
2851
      append_to_statement_list (else_, &expr);
2852
    }
2853
  if (emit_end && end_label)
2854
    {
2855
      t = build1 (LABEL_EXPR, void_type_node, end_label);
2856
      append_to_statement_list (t, &expr);
2857
    }
2858
 
2859
  return expr;
2860
}
2861
 
2862
/* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE.  */
2863
 
2864
tree
2865
gimple_boolify (tree expr)
2866
{
2867
  tree type = TREE_TYPE (expr);
2868
  location_t loc = EXPR_LOCATION (expr);
2869
 
2870
  if (TREE_CODE (expr) == NE_EXPR
2871
      && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
2872
      && integer_zerop (TREE_OPERAND (expr, 1)))
2873
    {
2874
      tree call = TREE_OPERAND (expr, 0);
2875
      tree fn = get_callee_fndecl (call);
2876
 
2877
      /* For __builtin_expect ((long) (x), y) recurse into x as well
2878
         if x is truth_value_p.  */
2879
      if (fn
2880
          && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL
2881
          && DECL_FUNCTION_CODE (fn) == BUILT_IN_EXPECT
2882
          && call_expr_nargs (call) == 2)
2883
        {
2884
          tree arg = CALL_EXPR_ARG (call, 0);
2885
          if (arg)
2886
            {
2887
              if (TREE_CODE (arg) == NOP_EXPR
2888
                  && TREE_TYPE (arg) == TREE_TYPE (call))
2889
                arg = TREE_OPERAND (arg, 0);
2890
              if (truth_value_p (TREE_CODE (arg)))
2891
                {
2892
                  arg = gimple_boolify (arg);
2893
                  CALL_EXPR_ARG (call, 0)
2894
                    = fold_convert_loc (loc, TREE_TYPE (call), arg);
2895
                }
2896
            }
2897
        }
2898
    }
2899
 
2900
  switch (TREE_CODE (expr))
2901
    {
2902
    case TRUTH_AND_EXPR:
2903
    case TRUTH_OR_EXPR:
2904
    case TRUTH_XOR_EXPR:
2905
    case TRUTH_ANDIF_EXPR:
2906
    case TRUTH_ORIF_EXPR:
2907
      /* Also boolify the arguments of truth exprs.  */
2908
      TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
2909
      /* FALLTHRU */
2910
 
2911
    case TRUTH_NOT_EXPR:
2912
      TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2913
 
2914
      /* These expressions always produce boolean results.  */
2915
      if (TREE_CODE (type) != BOOLEAN_TYPE)
2916
        TREE_TYPE (expr) = boolean_type_node;
2917
      return expr;
2918
 
2919
    default:
2920
      if (COMPARISON_CLASS_P (expr))
2921
        {
2922
          /* There expressions always prduce boolean results.  */
2923
          if (TREE_CODE (type) != BOOLEAN_TYPE)
2924
            TREE_TYPE (expr) = boolean_type_node;
2925
          return expr;
2926
        }
2927
      /* Other expressions that get here must have boolean values, but
2928
         might need to be converted to the appropriate mode.  */
2929
      if (TREE_CODE (type) == BOOLEAN_TYPE)
2930
        return expr;
2931
      return fold_convert_loc (loc, boolean_type_node, expr);
2932
    }
2933
}
2934
 
2935
/* Given a conditional expression *EXPR_P without side effects, gimplify
2936
   its operands.  New statements are inserted to PRE_P.  */
2937
 
2938
static enum gimplify_status
2939
gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
2940
{
2941
  tree expr = *expr_p, cond;
2942
  enum gimplify_status ret, tret;
2943
  enum tree_code code;
2944
 
2945
  cond = gimple_boolify (COND_EXPR_COND (expr));
2946
 
2947
  /* We need to handle && and || specially, as their gimplification
2948
     creates pure cond_expr, thus leading to an infinite cycle otherwise.  */
2949
  code = TREE_CODE (cond);
2950
  if (code == TRUTH_ANDIF_EXPR)
2951
    TREE_SET_CODE (cond, TRUTH_AND_EXPR);
2952
  else if (code == TRUTH_ORIF_EXPR)
2953
    TREE_SET_CODE (cond, TRUTH_OR_EXPR);
2954
  ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
2955
  COND_EXPR_COND (*expr_p) = cond;
2956
 
2957
  tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
2958
                                   is_gimple_val, fb_rvalue);
2959
  ret = MIN (ret, tret);
2960
  tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
2961
                                   is_gimple_val, fb_rvalue);
2962
 
2963
  return MIN (ret, tret);
2964
}
2965
 
2966
/* Return true if evaluating EXPR could trap.
2967
   EXPR is GENERIC, while tree_could_trap_p can be called
2968
   only on GIMPLE.  */
2969
 
2970
static bool
2971
generic_expr_could_trap_p (tree expr)
2972
{
2973
  unsigned i, n;
2974
 
2975
  if (!expr || is_gimple_val (expr))
2976
    return false;
2977
 
2978
  if (!EXPR_P (expr) || tree_could_trap_p (expr))
2979
    return true;
2980
 
2981
  n = TREE_OPERAND_LENGTH (expr);
2982
  for (i = 0; i < n; i++)
2983
    if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
2984
      return true;
2985
 
2986
  return false;
2987
}
2988
 
2989
/*  Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
2990
    into
2991
 
2992
    if (p)                      if (p)
2993
      t1 = a;                     a;
2994
    else                or      else
2995
      t1 = b;                     b;
2996
    t1;
2997
 
2998
    The second form is used when *EXPR_P is of type void.
2999
 
3000
    PRE_P points to the list where side effects that must happen before
3001
      *EXPR_P should be stored.  */
3002
 
3003
static enum gimplify_status
3004
gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
3005
{
3006
  tree expr = *expr_p;
3007
  tree type = TREE_TYPE (expr);
3008
  location_t loc = EXPR_LOCATION (expr);
3009
  tree tmp, arm1, arm2;
3010
  enum gimplify_status ret;
3011
  tree label_true, label_false, label_cont;
3012
  bool have_then_clause_p, have_else_clause_p;
3013
  gimple gimple_cond;
3014
  enum tree_code pred_code;
3015
  gimple_seq seq = NULL;
3016
 
3017
  /* If this COND_EXPR has a value, copy the values into a temporary within
3018
     the arms.  */
3019
  if (!VOID_TYPE_P (type))
3020
    {
3021
      tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
3022
      tree result;
3023
 
3024
      /* If either an rvalue is ok or we do not require an lvalue, create the
3025
         temporary.  But we cannot do that if the type is addressable.  */
3026
      if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
3027
          && !TREE_ADDRESSABLE (type))
3028
        {
3029
          if (gimplify_ctxp->allow_rhs_cond_expr
3030
              /* If either branch has side effects or could trap, it can't be
3031
                 evaluated unconditionally.  */
3032
              && !TREE_SIDE_EFFECTS (then_)
3033
              && !generic_expr_could_trap_p (then_)
3034
              && !TREE_SIDE_EFFECTS (else_)
3035
              && !generic_expr_could_trap_p (else_))
3036
            return gimplify_pure_cond_expr (expr_p, pre_p);
3037
 
3038
          tmp = create_tmp_var (type, "iftmp");
3039
          result = tmp;
3040
        }
3041
 
3042
      /* Otherwise, only create and copy references to the values.  */
3043
      else
3044
        {
3045
          type = build_pointer_type (type);
3046
 
3047
          if (!VOID_TYPE_P (TREE_TYPE (then_)))
3048
            then_ = build_fold_addr_expr_loc (loc, then_);
3049
 
3050
          if (!VOID_TYPE_P (TREE_TYPE (else_)))
3051
            else_ = build_fold_addr_expr_loc (loc, else_);
3052
 
3053
          expr
3054
            = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
3055
 
3056
          tmp = create_tmp_var (type, "iftmp");
3057
          result = build_simple_mem_ref_loc (loc, tmp);
3058
        }
3059
 
3060
      /* Build the new then clause, `tmp = then_;'.  But don't build the
3061
         assignment if the value is void; in C++ it can be if it's a throw.  */
3062
      if (!VOID_TYPE_P (TREE_TYPE (then_)))
3063
        TREE_OPERAND (expr, 1) = build2 (MODIFY_EXPR, type, tmp, then_);
3064
 
3065
      /* Similarly, build the new else clause, `tmp = else_;'.  */
3066
      if (!VOID_TYPE_P (TREE_TYPE (else_)))
3067
        TREE_OPERAND (expr, 2) = build2 (MODIFY_EXPR, type, tmp, else_);
3068
 
3069
      TREE_TYPE (expr) = void_type_node;
3070
      recalculate_side_effects (expr);
3071
 
3072
      /* Move the COND_EXPR to the prequeue.  */
3073
      gimplify_stmt (&expr, pre_p);
3074
 
3075
      *expr_p = result;
3076
      return GS_ALL_DONE;
3077
    }
3078
 
3079
  /* Remove any COMPOUND_EXPR so the following cases will be caught.  */
3080
  STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
3081
  if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
3082
    gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);
3083
 
3084
  /* Make sure the condition has BOOLEAN_TYPE.  */
3085
  TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3086
 
3087
  /* Break apart && and || conditions.  */
3088
  if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
3089
      || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
3090
    {
3091
      expr = shortcut_cond_expr (expr);
3092
 
3093
      if (expr != *expr_p)
3094
        {
3095
          *expr_p = expr;
3096
 
3097
          /* We can't rely on gimplify_expr to re-gimplify the expanded
3098
             form properly, as cleanups might cause the target labels to be
3099
             wrapped in a TRY_FINALLY_EXPR.  To prevent that, we need to
3100
             set up a conditional context.  */
3101
          gimple_push_condition ();
3102
          gimplify_stmt (expr_p, &seq);
3103
          gimple_pop_condition (pre_p);
3104
          gimple_seq_add_seq (pre_p, seq);
3105
 
3106
          return GS_ALL_DONE;
3107
        }
3108
    }
3109
 
3110
  /* Now do the normal gimplification.  */
3111
 
3112
  /* Gimplify condition.  */
3113
  ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
3114
                       fb_rvalue);
3115
  if (ret == GS_ERROR)
3116
    return GS_ERROR;
3117
  gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
3118
 
3119
  gimple_push_condition ();
3120
 
3121
  have_then_clause_p = have_else_clause_p = false;
3122
  if (TREE_OPERAND (expr, 1) != NULL
3123
      && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR
3124
      && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL
3125
      && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1)))
3126
          == current_function_decl)
3127
      /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3128
         have different locations, otherwise we end up with incorrect
3129
         location information on the branches.  */
3130
      && (optimize
3131
          || !EXPR_HAS_LOCATION (expr)
3132
          || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1))
3133
          || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1))))
3134
    {
3135
      label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1));
3136
      have_then_clause_p = true;
3137
    }
3138
  else
3139
    label_true = create_artificial_label (UNKNOWN_LOCATION);
3140
  if (TREE_OPERAND (expr, 2) != NULL
3141
      && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR
3142
      && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL
3143
      && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2)))
3144
          == current_function_decl)
3145
      /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3146
         have different locations, otherwise we end up with incorrect
3147
         location information on the branches.  */
3148
      && (optimize
3149
          || !EXPR_HAS_LOCATION (expr)
3150
          || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2))
3151
          || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2))))
3152
    {
3153
      label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2));
3154
      have_else_clause_p = true;
3155
    }
3156
  else
3157
    label_false = create_artificial_label (UNKNOWN_LOCATION);
3158
 
3159
  gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
3160
                                 &arm2);
3161
 
3162
  gimple_cond = gimple_build_cond (pred_code, arm1, arm2, label_true,
3163
                                   label_false);
3164
 
3165
  gimplify_seq_add_stmt (&seq, gimple_cond);
3166
  label_cont = NULL_TREE;
3167
  if (!have_then_clause_p)
3168
    {
3169
      /* For if (...) {} else { code; } put label_true after
3170
         the else block.  */
3171
      if (TREE_OPERAND (expr, 1) == NULL_TREE
3172
          && !have_else_clause_p
3173
          && TREE_OPERAND (expr, 2) != NULL_TREE)
3174
        label_cont = label_true;
3175
      else
3176
        {
3177
          gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
3178
          have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
3179
          /* For if (...) { code; } else {} or
3180
             if (...) { code; } else goto label; or
3181
             if (...) { code; return; } else { ... }
3182
             label_cont isn't needed.  */
3183
          if (!have_else_clause_p
3184
              && TREE_OPERAND (expr, 2) != NULL_TREE
3185
              && gimple_seq_may_fallthru (seq))
3186
            {
3187
              gimple g;
3188
              label_cont = create_artificial_label (UNKNOWN_LOCATION);
3189
 
3190
              g = gimple_build_goto (label_cont);
3191
 
3192
              /* GIMPLE_COND's are very low level; they have embedded
3193
                 gotos.  This particular embedded goto should not be marked
3194
                 with the location of the original COND_EXPR, as it would
3195
                 correspond to the COND_EXPR's condition, not the ELSE or the
3196
                 THEN arms.  To avoid marking it with the wrong location, flag
3197
                 it as "no location".  */
3198
              gimple_set_do_not_emit_location (g);
3199
 
3200
              gimplify_seq_add_stmt (&seq, g);
3201
            }
3202
        }
3203
    }
3204
  if (!have_else_clause_p)
3205
    {
3206
      gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
3207
      have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
3208
    }
3209
  if (label_cont)
3210
    gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
3211
 
3212
  gimple_pop_condition (pre_p);
3213
  gimple_seq_add_seq (pre_p, seq);
3214
 
3215
  if (ret == GS_ERROR)
3216
    ; /* Do nothing.  */
3217
  else if (have_then_clause_p || have_else_clause_p)
3218
    ret = GS_ALL_DONE;
3219
  else
3220
    {
3221
      /* Both arms are empty; replace the COND_EXPR with its predicate.  */
3222
      expr = TREE_OPERAND (expr, 0);
3223
      gimplify_stmt (&expr, pre_p);
3224
    }
3225
 
3226
  *expr_p = NULL;
3227
  return ret;
3228
}
3229
 
3230
/* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
3231
   to be marked addressable.
3232
 
3233
   We cannot rely on such an expression being directly markable if a temporary
3234
   has been created by the gimplification.  In this case, we create another
3235
   temporary and initialize it with a copy, which will become a store after we
3236
   mark it addressable.  This can happen if the front-end passed us something
3237
   that it could not mark addressable yet, like a Fortran pass-by-reference
3238
   parameter (int) floatvar.  */
3239
 
3240
static void
3241
prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
3242
{
3243
  while (handled_component_p (*expr_p))
3244
    expr_p = &TREE_OPERAND (*expr_p, 0);
3245
  if (is_gimple_reg (*expr_p))
3246
    *expr_p = get_initialized_tmp_var (*expr_p, seq_p, NULL);
3247
}
3248
 
3249
/* A subroutine of gimplify_modify_expr.  Replace a MODIFY_EXPR with
3250
   a call to __builtin_memcpy.  */
3251
 
3252
static enum gimplify_status
3253
gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
3254
                                gimple_seq *seq_p)
3255
{
3256
  tree t, to, to_ptr, from, from_ptr;
3257
  gimple gs;
3258
  location_t loc = EXPR_LOCATION (*expr_p);
3259
 
3260
  to = TREE_OPERAND (*expr_p, 0);
3261
  from = TREE_OPERAND (*expr_p, 1);
3262
 
3263
  /* Mark the RHS addressable.  Beware that it may not be possible to do so
3264
     directly if a temporary has been created by the gimplification.  */
3265
  prepare_gimple_addressable (&from, seq_p);
3266
 
3267
  mark_addressable (from);
3268
  from_ptr = build_fold_addr_expr_loc (loc, from);
3269
  gimplify_arg (&from_ptr, seq_p, loc);
3270
 
3271
  mark_addressable (to);
3272
  to_ptr = build_fold_addr_expr_loc (loc, to);
3273
  gimplify_arg (&to_ptr, seq_p, loc);
3274
 
3275
  t = builtin_decl_implicit (BUILT_IN_MEMCPY);
3276
 
3277
  gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
3278
 
3279
  if (want_value)
3280
    {
3281
      /* tmp = memcpy() */
3282
      t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
3283
      gimple_call_set_lhs (gs, t);
3284
      gimplify_seq_add_stmt (seq_p, gs);
3285
 
3286
      *expr_p = build_simple_mem_ref (t);
3287
      return GS_ALL_DONE;
3288
    }
3289
 
3290
  gimplify_seq_add_stmt (seq_p, gs);
3291
  *expr_p = NULL;
3292
  return GS_ALL_DONE;
3293
}
3294
 
3295
/* A subroutine of gimplify_modify_expr.  Replace a MODIFY_EXPR with
3296
   a call to __builtin_memset.  In this case we know that the RHS is
3297
   a CONSTRUCTOR with an empty element list.  */
3298
 
3299
static enum gimplify_status
3300
gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
3301
                                gimple_seq *seq_p)
3302
{
3303
  tree t, from, to, to_ptr;
3304
  gimple gs;
3305
  location_t loc = EXPR_LOCATION (*expr_p);
3306
 
3307
  /* Assert our assumptions, to abort instead of producing wrong code
3308
     silently if they are not met.  Beware that the RHS CONSTRUCTOR might
3309
     not be immediately exposed.  */
3310
  from = TREE_OPERAND (*expr_p, 1);
3311
  if (TREE_CODE (from) == WITH_SIZE_EXPR)
3312
    from = TREE_OPERAND (from, 0);
3313
 
3314
  gcc_assert (TREE_CODE (from) == CONSTRUCTOR
3315
              && VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (from)));
3316
 
3317
  /* Now proceed.  */
3318
  to = TREE_OPERAND (*expr_p, 0);
3319
 
3320
  to_ptr = build_fold_addr_expr_loc (loc, to);
3321
  gimplify_arg (&to_ptr, seq_p, loc);
3322
  t = builtin_decl_implicit (BUILT_IN_MEMSET);
3323
 
3324
  gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
3325
 
3326
  if (want_value)
3327
    {
3328
      /* tmp = memset() */
3329
      t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
3330
      gimple_call_set_lhs (gs, t);
3331
      gimplify_seq_add_stmt (seq_p, gs);
3332
 
3333
      *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
3334
      return GS_ALL_DONE;
3335
    }
3336
 
3337
  gimplify_seq_add_stmt (seq_p, gs);
3338
  *expr_p = NULL;
3339
  return GS_ALL_DONE;
3340
}
3341
 
3342
/* A subroutine of gimplify_init_ctor_preeval.  Called via walk_tree,
3343
   determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
3344
   assignment.  Return non-null if we detect a potential overlap.  */
3345
 
3346
struct gimplify_init_ctor_preeval_data
3347
{
3348
  /* The base decl of the lhs object.  May be NULL, in which case we
3349
     have to assume the lhs is indirect.  */
3350
  tree lhs_base_decl;
3351
 
3352
  /* The alias set of the lhs object.  */
3353
  alias_set_type lhs_alias_set;
3354
};
3355
 
3356
static tree
3357
gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
3358
{
3359
  struct gimplify_init_ctor_preeval_data *data
3360
    = (struct gimplify_init_ctor_preeval_data *) xdata;
3361
  tree t = *tp;
3362
 
3363
  /* If we find the base object, obviously we have overlap.  */
3364
  if (data->lhs_base_decl == t)
3365
    return t;
3366
 
3367
  /* If the constructor component is indirect, determine if we have a
3368
     potential overlap with the lhs.  The only bits of information we
3369
     have to go on at this point are addressability and alias sets.  */
3370
  if ((INDIRECT_REF_P (t)
3371
       || TREE_CODE (t) == MEM_REF)
3372
      && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3373
      && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
3374
    return t;
3375
 
3376
  /* If the constructor component is a call, determine if it can hide a
3377
     potential overlap with the lhs through an INDIRECT_REF like above.
3378
     ??? Ugh - this is completely broken.  In fact this whole analysis
3379
     doesn't look conservative.  */
3380
  if (TREE_CODE (t) == CALL_EXPR)
3381
    {
3382
      tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
3383
 
3384
      for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
3385
        if (POINTER_TYPE_P (TREE_VALUE (type))
3386
            && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3387
            && alias_sets_conflict_p (data->lhs_alias_set,
3388
                                      get_alias_set
3389
                                        (TREE_TYPE (TREE_VALUE (type)))))
3390
          return t;
3391
    }
3392
 
3393
  if (IS_TYPE_OR_DECL_P (t))
3394
    *walk_subtrees = 0;
3395
  return NULL;
3396
}
3397
 
3398
/* A subroutine of gimplify_init_constructor.  Pre-evaluate EXPR,
3399
   force values that overlap with the lhs (as described by *DATA)
3400
   into temporaries.  */
3401
 
3402
static void
3403
gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3404
                            struct gimplify_init_ctor_preeval_data *data)
3405
{
3406
  enum gimplify_status one;
3407
 
3408
  /* If the value is constant, then there's nothing to pre-evaluate.  */
3409
  if (TREE_CONSTANT (*expr_p))
3410
    {
3411
      /* Ensure it does not have side effects, it might contain a reference to
3412
         the object we're initializing.  */
3413
      gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
3414
      return;
3415
    }
3416
 
3417
  /* If the type has non-trivial constructors, we can't pre-evaluate.  */
3418
  if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
3419
    return;
3420
 
3421
  /* Recurse for nested constructors.  */
3422
  if (TREE_CODE (*expr_p) == CONSTRUCTOR)
3423
    {
3424
      unsigned HOST_WIDE_INT ix;
3425
      constructor_elt *ce;
3426
      VEC(constructor_elt,gc) *v = CONSTRUCTOR_ELTS (*expr_p);
3427
 
3428
      FOR_EACH_VEC_ELT (constructor_elt, v, ix, ce)
3429
        gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
3430
 
3431
      return;
3432
    }
3433
 
3434
  /* If this is a variable sized type, we must remember the size.  */
3435
  maybe_with_size_expr (expr_p);
3436
 
3437
  /* Gimplify the constructor element to something appropriate for the rhs
3438
     of a MODIFY_EXPR.  Given that we know the LHS is an aggregate, we know
3439
     the gimplifier will consider this a store to memory.  Doing this
3440
     gimplification now means that we won't have to deal with complicated
3441
     language-specific trees, nor trees like SAVE_EXPR that can induce
3442
     exponential search behavior.  */
3443
  one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
3444
  if (one == GS_ERROR)
3445
    {
3446
      *expr_p = NULL;
3447
      return;
3448
    }
3449
 
3450
  /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
3451
     with the lhs, since "a = { .x=a }" doesn't make sense.  This will
3452
     always be true for all scalars, since is_gimple_mem_rhs insists on a
3453
     temporary variable for them.  */
3454
  if (DECL_P (*expr_p))
3455
    return;
3456
 
3457
  /* If this is of variable size, we have no choice but to assume it doesn't
3458
     overlap since we can't make a temporary for it.  */
3459
  if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
3460
    return;
3461
 
3462
  /* Otherwise, we must search for overlap ...  */
3463
  if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
3464
    return;
3465
 
3466
  /* ... and if found, force the value into a temporary.  */
3467
  *expr_p = get_formal_tmp_var (*expr_p, pre_p);
3468
}
3469
 
3470
/* A subroutine of gimplify_init_ctor_eval.  Create a loop for
3471
   a RANGE_EXPR in a CONSTRUCTOR for an array.
3472
 
3473
      var = lower;
3474
    loop_entry:
3475
      object[var] = value;
3476
      if (var == upper)
3477
        goto loop_exit;
3478
      var = var + 1;
3479
      goto loop_entry;
3480
    loop_exit:
3481
 
3482
   We increment var _after_ the loop exit check because we might otherwise
3483
   fail if upper == TYPE_MAX_VALUE (type for upper).
3484
 
3485
   Note that we never have to deal with SAVE_EXPRs here, because this has
3486
   already been taken care of for us, in gimplify_init_ctor_preeval().  */
3487
 
3488
static void gimplify_init_ctor_eval (tree, VEC(constructor_elt,gc) *,
3489
                                     gimple_seq *, bool);
3490
 
3491
static void
3492
gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
3493
                               tree value, tree array_elt_type,
3494
                               gimple_seq *pre_p, bool cleared)
3495
{
3496
  tree loop_entry_label, loop_exit_label, fall_thru_label;
3497
  tree var, var_type, cref, tmp;
3498
 
3499
  loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
3500
  loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
3501
  fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
3502
 
3503
  /* Create and initialize the index variable.  */
3504
  var_type = TREE_TYPE (upper);
3505
  var = create_tmp_var (var_type, NULL);
3506
  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
3507
 
3508
  /* Add the loop entry label.  */
3509
  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
3510
 
3511
  /* Build the reference.  */
3512
  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3513
                 var, NULL_TREE, NULL_TREE);
3514
 
3515
  /* If we are a constructor, just call gimplify_init_ctor_eval to do
3516
     the store.  Otherwise just assign value to the reference.  */
3517
 
3518
  if (TREE_CODE (value) == CONSTRUCTOR)
3519
    /* NB we might have to call ourself recursively through
3520
       gimplify_init_ctor_eval if the value is a constructor.  */
3521
    gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3522
                             pre_p, cleared);
3523
  else
3524
    gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
3525
 
3526
  /* We exit the loop when the index var is equal to the upper bound.  */
3527
  gimplify_seq_add_stmt (pre_p,
3528
                         gimple_build_cond (EQ_EXPR, var, upper,
3529
                                            loop_exit_label, fall_thru_label));
3530
 
3531
  gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
3532
 
3533
  /* Otherwise, increment the index var...  */
3534
  tmp = build2 (PLUS_EXPR, var_type, var,
3535
                fold_convert (var_type, integer_one_node));
3536
  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
3537
 
3538
  /* ...and jump back to the loop entry.  */
3539
  gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
3540
 
3541
  /* Add the loop exit label.  */
3542
  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
3543
}
3544
 
3545
/* Return true if FDECL is accessing a field that is zero sized.  */
3546
 
3547
static bool
3548
zero_sized_field_decl (const_tree fdecl)
3549
{
3550
  if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
3551
      && integer_zerop (DECL_SIZE (fdecl)))
3552
    return true;
3553
  return false;
3554
}
3555
 
3556
/* Return true if TYPE is zero sized.  */
3557
 
3558
static bool
3559
zero_sized_type (const_tree type)
3560
{
3561
  if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
3562
      && integer_zerop (TYPE_SIZE (type)))
3563
    return true;
3564
  return false;
3565
}
3566
 
3567
/* A subroutine of gimplify_init_constructor.  Generate individual
3568
   MODIFY_EXPRs for a CONSTRUCTOR.  OBJECT is the LHS against which the
3569
   assignments should happen.  ELTS is the CONSTRUCTOR_ELTS of the
3570
   CONSTRUCTOR.  CLEARED is true if the entire LHS object has been
3571
   zeroed first.  */
3572
 
3573
static void
3574
gimplify_init_ctor_eval (tree object, VEC(constructor_elt,gc) *elts,
3575
                         gimple_seq *pre_p, bool cleared)
3576
{
3577
  tree array_elt_type = NULL;
3578
  unsigned HOST_WIDE_INT ix;
3579
  tree purpose, value;
3580
 
3581
  if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
3582
    array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
3583
 
3584
  FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
3585
    {
3586
      tree cref;
3587
 
3588
      /* NULL values are created above for gimplification errors.  */
3589
      if (value == NULL)
3590
        continue;
3591
 
3592
      if (cleared && initializer_zerop (value))
3593
        continue;
3594
 
3595
      /* ??? Here's to hoping the front end fills in all of the indices,
3596
         so we don't have to figure out what's missing ourselves.  */
3597
      gcc_assert (purpose);
3598
 
3599
      /* Skip zero-sized fields, unless value has side-effects.  This can
3600
         happen with calls to functions returning a zero-sized type, which
3601
         we shouldn't discard.  As a number of downstream passes don't
3602
         expect sets of zero-sized fields, we rely on the gimplification of
3603
         the MODIFY_EXPR we make below to drop the assignment statement.  */
3604
      if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
3605
        continue;
3606
 
3607
      /* If we have a RANGE_EXPR, we have to build a loop to assign the
3608
         whole range.  */
3609
      if (TREE_CODE (purpose) == RANGE_EXPR)
3610
        {
3611
          tree lower = TREE_OPERAND (purpose, 0);
3612
          tree upper = TREE_OPERAND (purpose, 1);
3613
 
3614
          /* If the lower bound is equal to upper, just treat it as if
3615
             upper was the index.  */
3616
          if (simple_cst_equal (lower, upper))
3617
            purpose = upper;
3618
          else
3619
            {
3620
              gimplify_init_ctor_eval_range (object, lower, upper, value,
3621
                                             array_elt_type, pre_p, cleared);
3622
              continue;
3623
            }
3624
        }
3625
 
3626
      if (array_elt_type)
3627
        {
3628
          /* Do not use bitsizetype for ARRAY_REF indices.  */
3629
          if (TYPE_DOMAIN (TREE_TYPE (object)))
3630
            purpose
3631
              = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
3632
                              purpose);
3633
          cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3634
                         purpose, NULL_TREE, NULL_TREE);
3635
        }
3636
      else
3637
        {
3638
          gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
3639
          cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
3640
                         unshare_expr (object), purpose, NULL_TREE);
3641
        }
3642
 
3643
      if (TREE_CODE (value) == CONSTRUCTOR
3644
          && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
3645
        gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3646
                                 pre_p, cleared);
3647
      else
3648
        {
3649
          tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
3650
          gimplify_and_add (init, pre_p);
3651
          ggc_free (init);
3652
        }
3653
    }
3654
}
3655
 
3656
/* Return the appropriate RHS predicate for this LHS.  */
3657
 
3658
gimple_predicate
3659
rhs_predicate_for (tree lhs)
3660
{
3661
  if (is_gimple_reg (lhs))
3662
    return is_gimple_reg_rhs_or_call;
3663
  else
3664
    return is_gimple_mem_rhs_or_call;
3665
}
3666
 
3667
/* Gimplify a C99 compound literal expression.  This just means adding
3668
   the DECL_EXPR before the current statement and using its anonymous
3669
   decl instead.  */
3670
 
3671
static enum gimplify_status
3672
gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p)
3673
{
3674
  tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
3675
  tree decl = DECL_EXPR_DECL (decl_s);
3676
  /* Mark the decl as addressable if the compound literal
3677
     expression is addressable now, otherwise it is marked too late
3678
     after we gimplify the initialization expression.  */
3679
  if (TREE_ADDRESSABLE (*expr_p))
3680
    TREE_ADDRESSABLE (decl) = 1;
3681
 
3682
  /* Preliminarily mark non-addressed complex variables as eligible
3683
     for promotion to gimple registers.  We'll transform their uses
3684
     as we find them.  */
3685
  if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
3686
       || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
3687
      && !TREE_THIS_VOLATILE (decl)
3688
      && !needs_to_live_in_memory (decl))
3689
    DECL_GIMPLE_REG_P (decl) = 1;
3690
 
3691
  /* This decl isn't mentioned in the enclosing block, so add it to the
3692
     list of temps.  FIXME it seems a bit of a kludge to say that
3693
     anonymous artificial vars aren't pushed, but everything else is.  */
3694
  if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
3695
    gimple_add_tmp_var (decl);
3696
 
3697
  gimplify_and_add (decl_s, pre_p);
3698
  *expr_p = decl;
3699
  return GS_OK;
3700
}
3701
 
3702
/* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
3703
   return a new CONSTRUCTOR if something changed.  */
3704
 
3705
static tree
3706
optimize_compound_literals_in_ctor (tree orig_ctor)
3707
{
3708
  tree ctor = orig_ctor;
3709
  VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (ctor);
3710
  unsigned int idx, num = VEC_length (constructor_elt, elts);
3711
 
3712
  for (idx = 0; idx < num; idx++)
3713
    {
3714
      tree value = VEC_index (constructor_elt, elts, idx)->value;
3715
      tree newval = value;
3716
      if (TREE_CODE (value) == CONSTRUCTOR)
3717
        newval = optimize_compound_literals_in_ctor (value);
3718
      else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
3719
        {
3720
          tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
3721
          tree decl = DECL_EXPR_DECL (decl_s);
3722
          tree init = DECL_INITIAL (decl);
3723
 
3724
          if (!TREE_ADDRESSABLE (value)
3725
              && !TREE_ADDRESSABLE (decl)
3726
              && init)
3727
            newval = optimize_compound_literals_in_ctor (init);
3728
        }
3729
      if (newval == value)
3730
        continue;
3731
 
3732
      if (ctor == orig_ctor)
3733
        {
3734
          ctor = copy_node (orig_ctor);
3735
          CONSTRUCTOR_ELTS (ctor) = VEC_copy (constructor_elt, gc, elts);
3736
          elts = CONSTRUCTOR_ELTS (ctor);
3737
        }
3738
      VEC_index (constructor_elt, elts, idx)->value = newval;
3739
    }
3740
  return ctor;
3741
}
3742
 
3743
/* A subroutine of gimplify_modify_expr.  Break out elements of a
3744
   CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
3745
 
3746
   Note that we still need to clear any elements that don't have explicit
3747
   initializers, so if not all elements are initialized we keep the
3748
   original MODIFY_EXPR, we just remove all of the constructor elements.
3749
 
3750
   If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
3751
   GS_ERROR if we would have to create a temporary when gimplifying
3752
   this constructor.  Otherwise, return GS_OK.
3753
 
3754
   If NOTIFY_TEMP_CREATION is false, just do the gimplification.  */
3755
 
3756
static enum gimplify_status
3757
gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3758
                           bool want_value, bool notify_temp_creation)
3759
{
3760
  tree object, ctor, type;
3761
  enum gimplify_status ret;
3762
  VEC(constructor_elt,gc) *elts;
3763
 
3764
  gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
3765
 
3766
  if (!notify_temp_creation)
3767
    {
3768
      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
3769
                           is_gimple_lvalue, fb_lvalue);
3770
      if (ret == GS_ERROR)
3771
        return ret;
3772
    }
3773
 
3774
  object = TREE_OPERAND (*expr_p, 0);
3775
  ctor = TREE_OPERAND (*expr_p, 1) =
3776
    optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
3777
  type = TREE_TYPE (ctor);
3778
  elts = CONSTRUCTOR_ELTS (ctor);
3779
  ret = GS_ALL_DONE;
3780
 
3781
  switch (TREE_CODE (type))
3782
    {
3783
    case RECORD_TYPE:
3784
    case UNION_TYPE:
3785
    case QUAL_UNION_TYPE:
3786
    case ARRAY_TYPE:
3787
      {
3788
        struct gimplify_init_ctor_preeval_data preeval_data;
3789
        HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
3790
        bool cleared, complete_p, valid_const_initializer;
3791
 
3792
        /* Aggregate types must lower constructors to initialization of
3793
           individual elements.  The exception is that a CONSTRUCTOR node
3794
           with no elements indicates zero-initialization of the whole.  */
3795
        if (VEC_empty (constructor_elt, elts))
3796
          {
3797
            if (notify_temp_creation)
3798
              return GS_OK;
3799
            break;
3800
          }
3801
 
3802
        /* Fetch information about the constructor to direct later processing.
3803
           We might want to make static versions of it in various cases, and
3804
           can only do so if it known to be a valid constant initializer.  */
3805
        valid_const_initializer
3806
          = categorize_ctor_elements (ctor, &num_nonzero_elements,
3807
                                      &num_ctor_elements, &complete_p);
3808
 
3809
        /* If a const aggregate variable is being initialized, then it
3810
           should never be a lose to promote the variable to be static.  */
3811
        if (valid_const_initializer
3812
            && num_nonzero_elements > 1
3813
            && TREE_READONLY (object)
3814
            && TREE_CODE (object) == VAR_DECL
3815
            && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
3816
          {
3817
            if (notify_temp_creation)
3818
              return GS_ERROR;
3819
            DECL_INITIAL (object) = ctor;
3820
            TREE_STATIC (object) = 1;
3821
            if (!DECL_NAME (object))
3822
              DECL_NAME (object) = create_tmp_var_name ("C");
3823
            walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
3824
 
3825
            /* ??? C++ doesn't automatically append a .<number> to the
3826
               assembler name, and even when it does, it looks a FE private
3827
               data structures to figure out what that number should be,
3828
               which are not set for this variable.  I suppose this is
3829
               important for local statics for inline functions, which aren't
3830
               "local" in the object file sense.  So in order to get a unique
3831
               TU-local symbol, we must invoke the lhd version now.  */
3832
            lhd_set_decl_assembler_name (object);
3833
 
3834
            *expr_p = NULL_TREE;
3835
            break;
3836
          }
3837
 
3838
        /* If there are "lots" of initialized elements, even discounting
3839
           those that are not address constants (and thus *must* be
3840
           computed at runtime), then partition the constructor into
3841
           constant and non-constant parts.  Block copy the constant
3842
           parts in, then generate code for the non-constant parts.  */
3843
        /* TODO.  There's code in cp/typeck.c to do this.  */
3844
 
3845
        if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
3846
          /* store_constructor will ignore the clearing of variable-sized
3847
             objects.  Initializers for such objects must explicitly set
3848
             every field that needs to be set.  */
3849
          cleared = false;
3850
        else if (!complete_p)
3851
          /* If the constructor isn't complete, clear the whole object
3852
             beforehand.
3853
 
3854
             ??? This ought not to be needed.  For any element not present
3855
             in the initializer, we should simply set them to zero.  Except
3856
             we'd need to *find* the elements that are not present, and that
3857
             requires trickery to avoid quadratic compile-time behavior in
3858
             large cases or excessive memory use in small cases.  */
3859
          cleared = true;
3860
        else if (num_ctor_elements - num_nonzero_elements
3861
                 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
3862
                 && num_nonzero_elements < num_ctor_elements / 4)
3863
          /* If there are "lots" of zeros, it's more efficient to clear
3864
             the memory and then set the nonzero elements.  */
3865
          cleared = true;
3866
        else
3867
          cleared = false;
3868
 
3869
        /* If there are "lots" of initialized elements, and all of them
3870
           are valid address constants, then the entire initializer can
3871
           be dropped to memory, and then memcpy'd out.  Don't do this
3872
           for sparse arrays, though, as it's more efficient to follow
3873
           the standard CONSTRUCTOR behavior of memset followed by
3874
           individual element initialization.  Also don't do this for small
3875
           all-zero initializers (which aren't big enough to merit
3876
           clearing), and don't try to make bitwise copies of
3877
           TREE_ADDRESSABLE types.  */
3878
        if (valid_const_initializer
3879
            && !(cleared || num_nonzero_elements == 0)
3880
            && !TREE_ADDRESSABLE (type))
3881
          {
3882
            HOST_WIDE_INT size = int_size_in_bytes (type);
3883
            unsigned int align;
3884
 
3885
            /* ??? We can still get unbounded array types, at least
3886
               from the C++ front end.  This seems wrong, but attempt
3887
               to work around it for now.  */
3888
            if (size < 0)
3889
              {
3890
                size = int_size_in_bytes (TREE_TYPE (object));
3891
                if (size >= 0)
3892
                  TREE_TYPE (ctor) = type = TREE_TYPE (object);
3893
              }
3894
 
3895
            /* Find the maximum alignment we can assume for the object.  */
3896
            /* ??? Make use of DECL_OFFSET_ALIGN.  */
3897
            if (DECL_P (object))
3898
              align = DECL_ALIGN (object);
3899
            else
3900
              align = TYPE_ALIGN (type);
3901
 
3902
            if (size > 0
3903
                && num_nonzero_elements > 1
3904
                && !can_move_by_pieces (size, align))
3905
              {
3906
                if (notify_temp_creation)
3907
                  return GS_ERROR;
3908
 
3909
                walk_tree (&ctor, force_labels_r, NULL, NULL);
3910
                ctor = tree_output_constant_def (ctor);
3911
                if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
3912
                  ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
3913
                TREE_OPERAND (*expr_p, 1) = ctor;
3914
 
3915
                /* This is no longer an assignment of a CONSTRUCTOR, but
3916
                   we still may have processing to do on the LHS.  So
3917
                   pretend we didn't do anything here to let that happen.  */
3918
                return GS_UNHANDLED;
3919
              }
3920
          }
3921
 
3922
        /* If the target is volatile, we have non-zero elements and more than
3923
           one field to assign, initialize the target from a temporary.  */
3924
        if (TREE_THIS_VOLATILE (object)
3925
            && !TREE_ADDRESSABLE (type)
3926
            && num_nonzero_elements > 0
3927
            && VEC_length (constructor_elt, elts) > 1)
3928
          {
3929
            tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type), NULL);
3930
            TREE_OPERAND (*expr_p, 0) = temp;
3931
            *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
3932
                              *expr_p,
3933
                              build2 (MODIFY_EXPR, void_type_node,
3934
                                      object, temp));
3935
            return GS_OK;
3936
          }
3937
 
3938
        if (notify_temp_creation)
3939
          return GS_OK;
3940
 
3941
        /* If there are nonzero elements and if needed, pre-evaluate to capture
3942
           elements overlapping with the lhs into temporaries.  We must do this
3943
           before clearing to fetch the values before they are zeroed-out.  */
3944
        if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
3945
          {
3946
            preeval_data.lhs_base_decl = get_base_address (object);
3947
            if (!DECL_P (preeval_data.lhs_base_decl))
3948
              preeval_data.lhs_base_decl = NULL;
3949
            preeval_data.lhs_alias_set = get_alias_set (object);
3950
 
3951
            gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
3952
                                        pre_p, post_p, &preeval_data);
3953
          }
3954
 
3955
        if (cleared)
3956
          {
3957
            /* Zap the CONSTRUCTOR element list, which simplifies this case.
3958
               Note that we still have to gimplify, in order to handle the
3959
               case of variable sized types.  Avoid shared tree structures.  */
3960
            CONSTRUCTOR_ELTS (ctor) = NULL;
3961
            TREE_SIDE_EFFECTS (ctor) = 0;
3962
            object = unshare_expr (object);
3963
            gimplify_stmt (expr_p, pre_p);
3964
          }
3965
 
3966
        /* If we have not block cleared the object, or if there are nonzero
3967
           elements in the constructor, add assignments to the individual
3968
           scalar fields of the object.  */
3969
        if (!cleared || num_nonzero_elements > 0)
3970
          gimplify_init_ctor_eval (object, elts, pre_p, cleared);
3971
 
3972
        *expr_p = NULL_TREE;
3973
      }
3974
      break;
3975
 
3976
    case COMPLEX_TYPE:
3977
      {
3978
        tree r, i;
3979
 
3980
        if (notify_temp_creation)
3981
          return GS_OK;
3982
 
3983
        /* Extract the real and imaginary parts out of the ctor.  */
3984
        gcc_assert (VEC_length (constructor_elt, elts) == 2);
3985
        r = VEC_index (constructor_elt, elts, 0)->value;
3986
        i = VEC_index (constructor_elt, elts, 1)->value;
3987
        if (r == NULL || i == NULL)
3988
          {
3989
            tree zero = build_zero_cst (TREE_TYPE (type));
3990
            if (r == NULL)
3991
              r = zero;
3992
            if (i == NULL)
3993
              i = zero;
3994
          }
3995
 
3996
        /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
3997
           represent creation of a complex value.  */
3998
        if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
3999
          {
4000
            ctor = build_complex (type, r, i);
4001
            TREE_OPERAND (*expr_p, 1) = ctor;
4002
          }
4003
        else
4004
          {
4005
            ctor = build2 (COMPLEX_EXPR, type, r, i);
4006
            TREE_OPERAND (*expr_p, 1) = ctor;
4007
            ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
4008
                                 pre_p,
4009
                                 post_p,
4010
                                 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
4011
                                 fb_rvalue);
4012
          }
4013
      }
4014
      break;
4015
 
4016
    case VECTOR_TYPE:
4017
      {
4018
        unsigned HOST_WIDE_INT ix;
4019
        constructor_elt *ce;
4020
 
4021
        if (notify_temp_creation)
4022
          return GS_OK;
4023
 
4024
        /* Go ahead and simplify constant constructors to VECTOR_CST.  */
4025
        if (TREE_CONSTANT (ctor))
4026
          {
4027
            bool constant_p = true;
4028
            tree value;
4029
 
4030
            /* Even when ctor is constant, it might contain non-*_CST
4031
               elements, such as addresses or trapping values like
4032
               1.0/0.0 - 1.0/0.0.  Such expressions don't belong
4033
               in VECTOR_CST nodes.  */
4034
            FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
4035
              if (!CONSTANT_CLASS_P (value))
4036
                {
4037
                  constant_p = false;
4038
                  break;
4039
                }
4040
 
4041
            if (constant_p)
4042
              {
4043
                TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
4044
                break;
4045
              }
4046
 
4047
            /* Don't reduce an initializer constant even if we can't
4048
               make a VECTOR_CST.  It won't do anything for us, and it'll
4049
               prevent us from representing it as a single constant.  */
4050
            if (initializer_constant_valid_p (ctor, type))
4051
              break;
4052
 
4053
            TREE_CONSTANT (ctor) = 0;
4054
          }
4055
 
4056
        /* Vector types use CONSTRUCTOR all the way through gimple
4057
          compilation as a general initializer.  */
4058
        FOR_EACH_VEC_ELT (constructor_elt, elts, ix, ce)
4059
          {
4060
            enum gimplify_status tret;
4061
            tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
4062
                                  fb_rvalue);
4063
            if (tret == GS_ERROR)
4064
              ret = GS_ERROR;
4065
          }
4066
        if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
4067
          TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
4068
      }
4069
      break;
4070
 
4071
    default:
4072
      /* So how did we get a CONSTRUCTOR for a scalar type?  */
4073
      gcc_unreachable ();
4074
    }
4075
 
4076
  if (ret == GS_ERROR)
4077
    return GS_ERROR;
4078
  else if (want_value)
4079
    {
4080
      *expr_p = object;
4081
      return GS_OK;
4082
    }
4083
  else
4084
    {
4085
      /* If we have gimplified both sides of the initializer but have
4086
         not emitted an assignment, do so now.  */
4087
      if (*expr_p)
4088
        {
4089
          tree lhs = TREE_OPERAND (*expr_p, 0);
4090
          tree rhs = TREE_OPERAND (*expr_p, 1);
4091
          gimple init = gimple_build_assign (lhs, rhs);
4092
          gimplify_seq_add_stmt (pre_p, init);
4093
          *expr_p = NULL;
4094
        }
4095
 
4096
      return GS_ALL_DONE;
4097
    }
4098
}
4099
 
4100
/* Given a pointer value OP0, return a simplified version of an
4101
   indirection through OP0, or NULL_TREE if no simplification is
4102
   possible.  Note that the resulting type may be different from
4103
   the type pointed to in the sense that it is still compatible
4104
   from the langhooks point of view. */
4105
 
4106
tree
4107
gimple_fold_indirect_ref (tree t)
4108
{
4109
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
4110
  tree sub = t;
4111
  tree subtype;
4112
 
4113
  STRIP_NOPS (sub);
4114
  subtype = TREE_TYPE (sub);
4115
  if (!POINTER_TYPE_P (subtype))
4116
    return NULL_TREE;
4117
 
4118
  if (TREE_CODE (sub) == ADDR_EXPR)
4119
    {
4120
      tree op = TREE_OPERAND (sub, 0);
4121
      tree optype = TREE_TYPE (op);
4122
      /* *&p => p */
4123
      if (useless_type_conversion_p (type, optype))
4124
        return op;
4125
 
4126
      /* *(foo *)&fooarray => fooarray[0] */
4127
      if (TREE_CODE (optype) == ARRAY_TYPE
4128
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
4129
          && useless_type_conversion_p (type, TREE_TYPE (optype)))
4130
       {
4131
         tree type_domain = TYPE_DOMAIN (optype);
4132
         tree min_val = size_zero_node;
4133
         if (type_domain && TYPE_MIN_VALUE (type_domain))
4134
           min_val = TYPE_MIN_VALUE (type_domain);
4135
         if (TREE_CODE (min_val) == INTEGER_CST)
4136
           return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
4137
       }
4138
      /* *(foo *)&complexfoo => __real__ complexfoo */
4139
      else if (TREE_CODE (optype) == COMPLEX_TYPE
4140
               && useless_type_conversion_p (type, TREE_TYPE (optype)))
4141
        return fold_build1 (REALPART_EXPR, type, op);
4142
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
4143
      else if (TREE_CODE (optype) == VECTOR_TYPE
4144
               && useless_type_conversion_p (type, TREE_TYPE (optype)))
4145
        {
4146
          tree part_width = TYPE_SIZE (type);
4147
          tree index = bitsize_int (0);
4148
          return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
4149
        }
4150
    }
4151
 
4152
  /* *(p + CST) -> ...  */
4153
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
4154
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
4155
    {
4156
      tree addr = TREE_OPERAND (sub, 0);
4157
      tree off = TREE_OPERAND (sub, 1);
4158
      tree addrtype;
4159
 
4160
      STRIP_NOPS (addr);
4161
      addrtype = TREE_TYPE (addr);
4162
 
4163
      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
4164
      if (TREE_CODE (addr) == ADDR_EXPR
4165
          && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
4166
          && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
4167
          && host_integerp (off, 1))
4168
        {
4169
          unsigned HOST_WIDE_INT offset = tree_low_cst (off, 1);
4170
          tree part_width = TYPE_SIZE (type);
4171
          unsigned HOST_WIDE_INT part_widthi
4172
            = tree_low_cst (part_width, 0) / BITS_PER_UNIT;
4173
          unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
4174
          tree index = bitsize_int (indexi);
4175
          if (offset / part_widthi
4176
              <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
4177
            return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
4178
                                part_width, index);
4179
        }
4180
 
4181
      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
4182
      if (TREE_CODE (addr) == ADDR_EXPR
4183
          && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
4184
          && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
4185
        {
4186
          tree size = TYPE_SIZE_UNIT (type);
4187
          if (tree_int_cst_equal (size, off))
4188
            return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
4189
        }
4190
 
4191
      /* *(p + CST) -> MEM_REF <p, CST>.  */
4192
      if (TREE_CODE (addr) != ADDR_EXPR
4193
          || DECL_P (TREE_OPERAND (addr, 0)))
4194
        return fold_build2 (MEM_REF, type,
4195
                            addr,
4196
                            build_int_cst_wide (ptype,
4197
                                                TREE_INT_CST_LOW (off),
4198
                                                TREE_INT_CST_HIGH (off)));
4199
    }
4200
 
4201
  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
4202
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
4203
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
4204
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
4205
    {
4206
      tree type_domain;
4207
      tree min_val = size_zero_node;
4208
      tree osub = sub;
4209
      sub = gimple_fold_indirect_ref (sub);
4210
      if (! sub)
4211
        sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
4212
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
4213
      if (type_domain && TYPE_MIN_VALUE (type_domain))
4214
        min_val = TYPE_MIN_VALUE (type_domain);
4215
      if (TREE_CODE (min_val) == INTEGER_CST)
4216
        return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
4217
    }
4218
 
4219
  return NULL_TREE;
4220
}
4221
 
4222
/* Given a pointer value OP0, return a simplified version of an
4223
   indirection through OP0, or NULL_TREE if no simplification is
4224
   possible.  This may only be applied to a rhs of an expression.
4225
   Note that the resulting type may be different from the type pointed
4226
   to in the sense that it is still compatible from the langhooks
4227
   point of view. */
4228
 
4229
static tree
4230
gimple_fold_indirect_ref_rhs (tree t)
4231
{
4232
  return gimple_fold_indirect_ref (t);
4233
}
4234
 
4235
/* Subroutine of gimplify_modify_expr to do simplifications of
4236
   MODIFY_EXPRs based on the code of the RHS.  We loop for as long as
4237
   something changes.  */
4238
 
4239
static enum gimplify_status
4240
gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
4241
                          gimple_seq *pre_p, gimple_seq *post_p,
4242
                          bool want_value)
4243
{
4244
  enum gimplify_status ret = GS_UNHANDLED;
4245
  bool changed;
4246
 
4247
  do
4248
    {
4249
      changed = false;
4250
      switch (TREE_CODE (*from_p))
4251
        {
4252
        case VAR_DECL:
4253
          /* If we're assigning from a read-only variable initialized with
4254
             a constructor, do the direct assignment from the constructor,
4255
             but only if neither source nor target are volatile since this
4256
             latter assignment might end up being done on a per-field basis.  */
4257
          if (DECL_INITIAL (*from_p)
4258
              && TREE_READONLY (*from_p)
4259
              && !TREE_THIS_VOLATILE (*from_p)
4260
              && !TREE_THIS_VOLATILE (*to_p)
4261
              && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
4262
            {
4263
              tree old_from = *from_p;
4264
              enum gimplify_status subret;
4265
 
4266
              /* Move the constructor into the RHS.  */
4267
              *from_p = unshare_expr (DECL_INITIAL (*from_p));
4268
 
4269
              /* Let's see if gimplify_init_constructor will need to put
4270
                 it in memory.  */
4271
              subret = gimplify_init_constructor (expr_p, NULL, NULL,
4272
                                                  false, true);
4273
              if (subret == GS_ERROR)
4274
                {
4275
                  /* If so, revert the change.  */
4276
                  *from_p = old_from;
4277
                }
4278
              else
4279
                {
4280
                  ret = GS_OK;
4281
                  changed = true;
4282
                }
4283
            }
4284
          break;
4285
        case INDIRECT_REF:
4286
          {
4287
            /* If we have code like
4288
 
4289
             *(const A*)(A*)&x
4290
 
4291
             where the type of "x" is a (possibly cv-qualified variant
4292
             of "A"), treat the entire expression as identical to "x".
4293
             This kind of code arises in C++ when an object is bound
4294
             to a const reference, and if "x" is a TARGET_EXPR we want
4295
             to take advantage of the optimization below.  */
4296
            bool volatile_p = TREE_THIS_VOLATILE (*from_p);
4297
            tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
4298
            if (t)
4299
              {
4300
                if (TREE_THIS_VOLATILE (t) != volatile_p)
4301
                  {
4302
                    if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
4303
                      t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
4304
                                                    build_fold_addr_expr (t));
4305
                    if (REFERENCE_CLASS_P (t))
4306
                      TREE_THIS_VOLATILE (t) = volatile_p;
4307
                  }
4308
                *from_p = t;
4309
                ret = GS_OK;
4310
                changed = true;
4311
              }
4312
            break;
4313
          }
4314
 
4315
        case TARGET_EXPR:
4316
          {
4317
            /* If we are initializing something from a TARGET_EXPR, strip the
4318
               TARGET_EXPR and initialize it directly, if possible.  This can't
4319
               be done if the initializer is void, since that implies that the
4320
               temporary is set in some non-trivial way.
4321
 
4322
               ??? What about code that pulls out the temp and uses it
4323
               elsewhere? I think that such code never uses the TARGET_EXPR as
4324
               an initializer.  If I'm wrong, we'll die because the temp won't
4325
               have any RTL.  In that case, I guess we'll need to replace
4326
               references somehow.  */
4327
            tree init = TARGET_EXPR_INITIAL (*from_p);
4328
 
4329
            if (init
4330
                && !VOID_TYPE_P (TREE_TYPE (init)))
4331
              {
4332
                *from_p = init;
4333
                ret = GS_OK;
4334
                changed = true;
4335
              }
4336
          }
4337
          break;
4338
 
4339
        case COMPOUND_EXPR:
4340
          /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
4341
             caught.  */
4342
          gimplify_compound_expr (from_p, pre_p, true);
4343
          ret = GS_OK;
4344
          changed = true;
4345
          break;
4346
 
4347
        case CONSTRUCTOR:
4348
          /* If we already made some changes, let the front end have a
4349
             crack at this before we break it down.  */
4350
          if (ret != GS_UNHANDLED)
4351
            break;
4352
          /* If we're initializing from a CONSTRUCTOR, break this into
4353
             individual MODIFY_EXPRs.  */
4354
          return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
4355
                                            false);
4356
 
4357
        case COND_EXPR:
4358
          /* If we're assigning to a non-register type, push the assignment
4359
             down into the branches.  This is mandatory for ADDRESSABLE types,
4360
             since we cannot generate temporaries for such, but it saves a
4361
             copy in other cases as well.  */
4362
          if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
4363
            {
4364
              /* This code should mirror the code in gimplify_cond_expr. */
4365
              enum tree_code code = TREE_CODE (*expr_p);
4366
              tree cond = *from_p;
4367
              tree result = *to_p;
4368
 
4369
              ret = gimplify_expr (&result, pre_p, post_p,
4370
                                   is_gimple_lvalue, fb_lvalue);
4371
              if (ret != GS_ERROR)
4372
                ret = GS_OK;
4373
 
4374
              if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
4375
                TREE_OPERAND (cond, 1)
4376
                  = build2 (code, void_type_node, result,
4377
                            TREE_OPERAND (cond, 1));
4378
              if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
4379
                TREE_OPERAND (cond, 2)
4380
                  = build2 (code, void_type_node, unshare_expr (result),
4381
                            TREE_OPERAND (cond, 2));
4382
 
4383
              TREE_TYPE (cond) = void_type_node;
4384
              recalculate_side_effects (cond);
4385
 
4386
              if (want_value)
4387
                {
4388
                  gimplify_and_add (cond, pre_p);
4389
                  *expr_p = unshare_expr (result);
4390
                }
4391
              else
4392
                *expr_p = cond;
4393
              return ret;
4394
            }
4395
          break;
4396
 
4397
        case CALL_EXPR:
4398
          /* For calls that return in memory, give *to_p as the CALL_EXPR's
4399
             return slot so that we don't generate a temporary.  */
4400
          if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
4401
              && aggregate_value_p (*from_p, *from_p))
4402
            {
4403
              bool use_target;
4404
 
4405
              if (!(rhs_predicate_for (*to_p))(*from_p))
4406
                /* If we need a temporary, *to_p isn't accurate.  */
4407
                use_target = false;
4408
              /* It's OK to use the return slot directly unless it's an NRV. */
4409
              else if (TREE_CODE (*to_p) == RESULT_DECL
4410
                       && DECL_NAME (*to_p) == NULL_TREE
4411
                       && needs_to_live_in_memory (*to_p))
4412
                use_target = true;
4413
              else if (is_gimple_reg_type (TREE_TYPE (*to_p))
4414
                       || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
4415
                /* Don't force regs into memory.  */
4416
                use_target = false;
4417
              else if (TREE_CODE (*expr_p) == INIT_EXPR)
4418
                /* It's OK to use the target directly if it's being
4419
                   initialized. */
4420
                use_target = true;
4421
              else if (variably_modified_type_p (TREE_TYPE (*to_p), NULL_TREE))
4422
                /* Always use the target and thus RSO for variable-sized types.
4423
                   GIMPLE cannot deal with a variable-sized assignment
4424
                   embedded in a call statement.  */
4425
                use_target = true;
4426
              else if (TREE_CODE (*to_p) != SSA_NAME
4427
                      && (!is_gimple_variable (*to_p)
4428
                          || needs_to_live_in_memory (*to_p)))
4429
                /* Don't use the original target if it's already addressable;
4430
                   if its address escapes, and the called function uses the
4431
                   NRV optimization, a conforming program could see *to_p
4432
                   change before the called function returns; see c++/19317.
4433
                   When optimizing, the return_slot pass marks more functions
4434
                   as safe after we have escape info.  */
4435
                use_target = false;
4436
              else
4437
                use_target = true;
4438
 
4439
              if (use_target)
4440
                {
4441
                  CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
4442
                  mark_addressable (*to_p);
4443
                }
4444
            }
4445
          break;
4446
 
4447
        case WITH_SIZE_EXPR:
4448
          /* Likewise for calls that return an aggregate of non-constant size,
4449
             since we would not be able to generate a temporary at all.  */
4450
          if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
4451
            {
4452
              *from_p = TREE_OPERAND (*from_p, 0);
4453
              /* We don't change ret in this case because the
4454
                 WITH_SIZE_EXPR might have been added in
4455
                 gimplify_modify_expr, so returning GS_OK would lead to an
4456
                 infinite loop.  */
4457
              changed = true;
4458
            }
4459
          break;
4460
 
4461
          /* If we're initializing from a container, push the initialization
4462
             inside it.  */
4463
        case CLEANUP_POINT_EXPR:
4464
        case BIND_EXPR:
4465
        case STATEMENT_LIST:
4466
          {
4467
            tree wrap = *from_p;
4468
            tree t;
4469
 
4470
            ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
4471
                                 fb_lvalue);
4472
            if (ret != GS_ERROR)
4473
              ret = GS_OK;
4474
 
4475
            t = voidify_wrapper_expr (wrap, *expr_p);
4476
            gcc_assert (t == *expr_p);
4477
 
4478
            if (want_value)
4479
              {
4480
                gimplify_and_add (wrap, pre_p);
4481
                *expr_p = unshare_expr (*to_p);
4482
              }
4483
            else
4484
              *expr_p = wrap;
4485
            return GS_OK;
4486
          }
4487
 
4488
        case COMPOUND_LITERAL_EXPR:
4489
          {
4490
            tree complit = TREE_OPERAND (*expr_p, 1);
4491
            tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
4492
            tree decl = DECL_EXPR_DECL (decl_s);
4493
            tree init = DECL_INITIAL (decl);
4494
 
4495
            /* struct T x = (struct T) { 0, 1, 2 } can be optimized
4496
               into struct T x = { 0, 1, 2 } if the address of the
4497
               compound literal has never been taken.  */
4498
            if (!TREE_ADDRESSABLE (complit)
4499
                && !TREE_ADDRESSABLE (decl)
4500
                && init)
4501
              {
4502
                *expr_p = copy_node (*expr_p);
4503
                TREE_OPERAND (*expr_p, 1) = init;
4504
                return GS_OK;
4505
              }
4506
          }
4507
 
4508
        default:
4509
          break;
4510
        }
4511
    }
4512
  while (changed);
4513
 
4514
  return ret;
4515
}
4516
 
4517
/* Promote partial stores to COMPLEX variables to total stores.  *EXPR_P is
4518
   a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
4519
   DECL_GIMPLE_REG_P set.
4520
 
4521
   IMPORTANT NOTE: This promotion is performed by introducing a load of the
4522
   other, unmodified part of the complex object just before the total store.
4523
   As a consequence, if the object is still uninitialized, an undefined value
4524
   will be loaded into a register, which may result in a spurious exception
4525
   if the register is floating-point and the value happens to be a signaling
4526
   NaN for example.  Then the fully-fledged complex operations lowering pass
4527
   followed by a DCE pass are necessary in order to fix things up.  */
4528
 
4529
static enum gimplify_status
4530
gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
4531
                                   bool want_value)
4532
{
4533
  enum tree_code code, ocode;
4534
  tree lhs, rhs, new_rhs, other, realpart, imagpart;
4535
 
4536
  lhs = TREE_OPERAND (*expr_p, 0);
4537
  rhs = TREE_OPERAND (*expr_p, 1);
4538
  code = TREE_CODE (lhs);
4539
  lhs = TREE_OPERAND (lhs, 0);
4540
 
4541
  ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
4542
  other = build1 (ocode, TREE_TYPE (rhs), lhs);
4543
  TREE_NO_WARNING (other) = 1;
4544
  other = get_formal_tmp_var (other, pre_p);
4545
 
4546
  realpart = code == REALPART_EXPR ? rhs : other;
4547
  imagpart = code == REALPART_EXPR ? other : rhs;
4548
 
4549
  if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
4550
    new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
4551
  else
4552
    new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
4553
 
4554
  gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
4555
  *expr_p = (want_value) ? rhs : NULL_TREE;
4556
 
4557
  return GS_ALL_DONE;
4558
}
4559
 
4560
/* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
4561
 
4562
      modify_expr
4563
              : varname '=' rhs
4564
              | '*' ID '=' rhs
4565
 
4566
    PRE_P points to the list where side effects that must happen before
4567
        *EXPR_P should be stored.
4568
 
4569
    POST_P points to the list where side effects that must happen after
4570
        *EXPR_P should be stored.
4571
 
4572
    WANT_VALUE is nonzero iff we want to use the value of this expression
4573
        in another expression.  */
4574
 
4575
static enum gimplify_status
4576
gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4577
                      bool want_value)
4578
{
4579
  tree *from_p = &TREE_OPERAND (*expr_p, 1);
4580
  tree *to_p = &TREE_OPERAND (*expr_p, 0);
4581
  enum gimplify_status ret = GS_UNHANDLED;
4582
  gimple assign;
4583
  location_t loc = EXPR_LOCATION (*expr_p);
4584
 
4585
  gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
4586
              || TREE_CODE (*expr_p) == INIT_EXPR);
4587
 
4588
  /* Trying to simplify a clobber using normal logic doesn't work,
4589
     so handle it here.  */
4590
  if (TREE_CLOBBER_P (*from_p))
4591
    {
4592
      gcc_assert (!want_value && TREE_CODE (*to_p) == VAR_DECL);
4593
      gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
4594
      *expr_p = NULL;
4595
      return GS_ALL_DONE;
4596
    }
4597
 
4598
  /* Insert pointer conversions required by the middle-end that are not
4599
     required by the frontend.  This fixes middle-end type checking for
4600
     for example gcc.dg/redecl-6.c.  */
4601
  if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
4602
    {
4603
      STRIP_USELESS_TYPE_CONVERSION (*from_p);
4604
      if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
4605
        *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
4606
    }
4607
 
4608
  /* See if any simplifications can be done based on what the RHS is.  */
4609
  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4610
                                  want_value);
4611
  if (ret != GS_UNHANDLED)
4612
    return ret;
4613
 
4614
  /* For zero sized types only gimplify the left hand side and right hand
4615
     side as statements and throw away the assignment.  Do this after
4616
     gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
4617
     types properly.  */
4618
  if (zero_sized_type (TREE_TYPE (*from_p)) && !want_value)
4619
    {
4620
      gimplify_stmt (from_p, pre_p);
4621
      gimplify_stmt (to_p, pre_p);
4622
      *expr_p = NULL_TREE;
4623
      return GS_ALL_DONE;
4624
    }
4625
 
4626
  /* If the value being copied is of variable width, compute the length
4627
     of the copy into a WITH_SIZE_EXPR.   Note that we need to do this
4628
     before gimplifying any of the operands so that we can resolve any
4629
     PLACEHOLDER_EXPRs in the size.  Also note that the RTL expander uses
4630
     the size of the expression to be copied, not of the destination, so
4631
     that is what we must do here.  */
4632
  maybe_with_size_expr (from_p);
4633
 
4634
  ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
4635
  if (ret == GS_ERROR)
4636
    return ret;
4637
 
4638
  /* As a special case, we have to temporarily allow for assignments
4639
     with a CALL_EXPR on the RHS.  Since in GIMPLE a function call is
4640
     a toplevel statement, when gimplifying the GENERIC expression
4641
     MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
4642
     GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
4643
 
4644
     Instead, we need to create the tuple GIMPLE_CALL <a, foo>.  To
4645
     prevent gimplify_expr from trying to create a new temporary for
4646
     foo's LHS, we tell it that it should only gimplify until it
4647
     reaches the CALL_EXPR.  On return from gimplify_expr, the newly
4648
     created GIMPLE_CALL <foo> will be the last statement in *PRE_P
4649
     and all we need to do here is set 'a' to be its LHS.  */
4650
  ret = gimplify_expr (from_p, pre_p, post_p, rhs_predicate_for (*to_p),
4651
                       fb_rvalue);
4652
  if (ret == GS_ERROR)
4653
    return ret;
4654
 
4655
  /* Now see if the above changed *from_p to something we handle specially.  */
4656
  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4657
                                  want_value);
4658
  if (ret != GS_UNHANDLED)
4659
    return ret;
4660
 
4661
  /* If we've got a variable sized assignment between two lvalues (i.e. does
4662
     not involve a call), then we can make things a bit more straightforward
4663
     by converting the assignment to memcpy or memset.  */
4664
  if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
4665
    {
4666
      tree from = TREE_OPERAND (*from_p, 0);
4667
      tree size = TREE_OPERAND (*from_p, 1);
4668
 
4669
      if (TREE_CODE (from) == CONSTRUCTOR)
4670
        return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
4671
 
4672
      if (is_gimple_addressable (from))
4673
        {
4674
          *from_p = from;
4675
          return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
4676
                                                 pre_p);
4677
        }
4678
    }
4679
 
4680
  /* Transform partial stores to non-addressable complex variables into
4681
     total stores.  This allows us to use real instead of virtual operands
4682
     for these variables, which improves optimization.  */
4683
  if ((TREE_CODE (*to_p) == REALPART_EXPR
4684
       || TREE_CODE (*to_p) == IMAGPART_EXPR)
4685
      && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
4686
    return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
4687
 
4688
  /* Try to alleviate the effects of the gimplification creating artificial
4689
     temporaries (see for example is_gimple_reg_rhs) on the debug info.  */
4690
  if (!gimplify_ctxp->into_ssa
4691
      && TREE_CODE (*from_p) == VAR_DECL
4692
      && DECL_IGNORED_P (*from_p)
4693
      && DECL_P (*to_p)
4694
      && !DECL_IGNORED_P (*to_p))
4695
    {
4696
      if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
4697
        DECL_NAME (*from_p)
4698
          = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
4699
      DECL_DEBUG_EXPR_IS_FROM (*from_p) = 1;
4700
      SET_DECL_DEBUG_EXPR (*from_p, *to_p);
4701
   }
4702
 
4703
  if (want_value && TREE_THIS_VOLATILE (*to_p))
4704
    *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
4705
 
4706
  if (TREE_CODE (*from_p) == CALL_EXPR)
4707
    {
4708
      /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
4709
         instead of a GIMPLE_ASSIGN.  */
4710
      tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
4711
      CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
4712
      STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
4713
      assign = gimple_build_call_from_tree (*from_p);
4714
      gimple_call_set_fntype (assign, TREE_TYPE (fnptrtype));
4715
      if (!gimple_call_noreturn_p (assign))
4716
        gimple_call_set_lhs (assign, *to_p);
4717
    }
4718
  else
4719
    {
4720
      assign = gimple_build_assign (*to_p, *from_p);
4721
      gimple_set_location (assign, EXPR_LOCATION (*expr_p));
4722
    }
4723
 
4724
  gimplify_seq_add_stmt (pre_p, assign);
4725
 
4726
  if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
4727
    {
4728
      /* If we've somehow already got an SSA_NAME on the LHS, then
4729
         we've probably modified it twice.  Not good.  */
4730
      gcc_assert (TREE_CODE (*to_p) != SSA_NAME);
4731
      *to_p = make_ssa_name (*to_p, assign);
4732
      gimple_set_lhs (assign, *to_p);
4733
    }
4734
 
4735
  if (want_value)
4736
    {
4737
      *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
4738
      return GS_OK;
4739
    }
4740
  else
4741
    *expr_p = NULL;
4742
 
4743
  return GS_ALL_DONE;
4744
}
4745
 
4746
/* Gimplify a comparison between two variable-sized objects.  Do this
4747
   with a call to BUILT_IN_MEMCMP.  */
4748
 
4749
static enum gimplify_status
4750
gimplify_variable_sized_compare (tree *expr_p)
4751
{
4752
  location_t loc = EXPR_LOCATION (*expr_p);
4753
  tree op0 = TREE_OPERAND (*expr_p, 0);
4754
  tree op1 = TREE_OPERAND (*expr_p, 1);
4755
  tree t, arg, dest, src, expr;
4756
 
4757
  arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
4758
  arg = unshare_expr (arg);
4759
  arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
4760
  src = build_fold_addr_expr_loc (loc, op1);
4761
  dest = build_fold_addr_expr_loc (loc, op0);
4762
  t = builtin_decl_implicit (BUILT_IN_MEMCMP);
4763
  t = build_call_expr_loc (loc, t, 3, dest, src, arg);
4764
 
4765
  expr
4766
    = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
4767
  SET_EXPR_LOCATION (expr, loc);
4768
  *expr_p = expr;
4769
 
4770
  return GS_OK;
4771
}
4772
 
4773
/* Gimplify a comparison between two aggregate objects of integral scalar
4774
   mode as a comparison between the bitwise equivalent scalar values.  */
4775
 
4776
static enum gimplify_status
4777
gimplify_scalar_mode_aggregate_compare (tree *expr_p)
4778
{
4779
  location_t loc = EXPR_LOCATION (*expr_p);
4780
  tree op0 = TREE_OPERAND (*expr_p, 0);
4781
  tree op1 = TREE_OPERAND (*expr_p, 1);
4782
 
4783
  tree type = TREE_TYPE (op0);
4784
  tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
4785
 
4786
  op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
4787
  op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
4788
 
4789
  *expr_p
4790
    = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
4791
 
4792
  return GS_OK;
4793
}
4794
 
4795
/* Gimplify an expression sequence.  This function gimplifies each
4796
   expression and rewrites the original expression with the last
4797
   expression of the sequence in GIMPLE form.
4798
 
4799
   PRE_P points to the list where the side effects for all the
4800
       expressions in the sequence will be emitted.
4801
 
4802
   WANT_VALUE is true when the result of the last COMPOUND_EXPR is used.  */
4803
 
4804
static enum gimplify_status
4805
gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
4806
{
4807
  tree t = *expr_p;
4808
 
4809
  do
4810
    {
4811
      tree *sub_p = &TREE_OPERAND (t, 0);
4812
 
4813
      if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
4814
        gimplify_compound_expr (sub_p, pre_p, false);
4815
      else
4816
        gimplify_stmt (sub_p, pre_p);
4817
 
4818
      t = TREE_OPERAND (t, 1);
4819
    }
4820
  while (TREE_CODE (t) == COMPOUND_EXPR);
4821
 
4822
  *expr_p = t;
4823
  if (want_value)
4824
    return GS_OK;
4825
  else
4826
    {
4827
      gimplify_stmt (expr_p, pre_p);
4828
      return GS_ALL_DONE;
4829
    }
4830
}
4831
 
4832
/* Gimplify a SAVE_EXPR node.  EXPR_P points to the expression to
4833
   gimplify.  After gimplification, EXPR_P will point to a new temporary
4834
   that holds the original value of the SAVE_EXPR node.
4835
 
4836
   PRE_P points to the list where side effects that must happen before
4837
   *EXPR_P should be stored.  */
4838
 
4839
static enum gimplify_status
4840
gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4841
{
4842
  enum gimplify_status ret = GS_ALL_DONE;
4843
  tree val;
4844
 
4845
  gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
4846
  val = TREE_OPERAND (*expr_p, 0);
4847
 
4848
  /* If the SAVE_EXPR has not been resolved, then evaluate it once.  */
4849
  if (!SAVE_EXPR_RESOLVED_P (*expr_p))
4850
    {
4851
      /* The operand may be a void-valued expression such as SAVE_EXPRs
4852
         generated by the Java frontend for class initialization.  It is
4853
         being executed only for its side-effects.  */
4854
      if (TREE_TYPE (val) == void_type_node)
4855
        {
4856
          ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
4857
                               is_gimple_stmt, fb_none);
4858
          val = NULL;
4859
        }
4860
      else
4861
        val = get_initialized_tmp_var (val, pre_p, post_p);
4862
 
4863
      TREE_OPERAND (*expr_p, 0) = val;
4864
      SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
4865
    }
4866
 
4867
  *expr_p = val;
4868
 
4869
  return ret;
4870
}
4871
 
4872
/* Rewrite the ADDR_EXPR node pointed to by EXPR_P
4873
 
4874
      unary_expr
4875
              : ...
4876
              | '&' varname
4877
              ...
4878
 
4879
    PRE_P points to the list where side effects that must happen before
4880
        *EXPR_P should be stored.
4881
 
4882
    POST_P points to the list where side effects that must happen after
4883
        *EXPR_P should be stored.  */
4884
 
4885
static enum gimplify_status
4886
gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4887
{
4888
  tree expr = *expr_p;
4889
  tree op0 = TREE_OPERAND (expr, 0);
4890
  enum gimplify_status ret;
4891
  location_t loc = EXPR_LOCATION (*expr_p);
4892
 
4893
  switch (TREE_CODE (op0))
4894
    {
4895
    case INDIRECT_REF:
4896
    do_indirect_ref:
4897
      /* Check if we are dealing with an expression of the form '&*ptr'.
4898
         While the front end folds away '&*ptr' into 'ptr', these
4899
         expressions may be generated internally by the compiler (e.g.,
4900
         builtins like __builtin_va_end).  */
4901
      /* Caution: the silent array decomposition semantics we allow for
4902
         ADDR_EXPR means we can't always discard the pair.  */
4903
      /* Gimplification of the ADDR_EXPR operand may drop
4904
         cv-qualification conversions, so make sure we add them if
4905
         needed.  */
4906
      {
4907
        tree op00 = TREE_OPERAND (op0, 0);
4908
        tree t_expr = TREE_TYPE (expr);
4909
        tree t_op00 = TREE_TYPE (op00);
4910
 
4911
        if (!useless_type_conversion_p (t_expr, t_op00))
4912
          op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
4913
        *expr_p = op00;
4914
        ret = GS_OK;
4915
      }
4916
      break;
4917
 
4918
    case VIEW_CONVERT_EXPR:
4919
      /* Take the address of our operand and then convert it to the type of
4920
         this ADDR_EXPR.
4921
 
4922
         ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
4923
         all clear.  The impact of this transformation is even less clear.  */
4924
 
4925
      /* If the operand is a useless conversion, look through it.  Doing so
4926
         guarantees that the ADDR_EXPR and its operand will remain of the
4927
         same type.  */
4928
      if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
4929
        op0 = TREE_OPERAND (op0, 0);
4930
 
4931
      *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
4932
                                  build_fold_addr_expr_loc (loc,
4933
                                                        TREE_OPERAND (op0, 0)));
4934
      ret = GS_OK;
4935
      break;
4936
 
4937
    default:
4938
      /* We use fb_either here because the C frontend sometimes takes
4939
         the address of a call that returns a struct; see
4940
         gcc.dg/c99-array-lval-1.c.  The gimplifier will correctly make
4941
         the implied temporary explicit.  */
4942
 
4943
      /* Make the operand addressable.  */
4944
      ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
4945
                           is_gimple_addressable, fb_either);
4946
      if (ret == GS_ERROR)
4947
        break;
4948
 
4949
      /* Then mark it.  Beware that it may not be possible to do so directly
4950
         if a temporary has been created by the gimplification.  */
4951
      prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
4952
 
4953
      op0 = TREE_OPERAND (expr, 0);
4954
 
4955
      /* For various reasons, the gimplification of the expression
4956
         may have made a new INDIRECT_REF.  */
4957
      if (TREE_CODE (op0) == INDIRECT_REF)
4958
        goto do_indirect_ref;
4959
 
4960
      mark_addressable (TREE_OPERAND (expr, 0));
4961
 
4962
      /* The FEs may end up building ADDR_EXPRs early on a decl with
4963
         an incomplete type.  Re-build ADDR_EXPRs in canonical form
4964
         here.  */
4965
      if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
4966
        *expr_p = build_fold_addr_expr (op0);
4967
 
4968
      /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly.  */
4969
      recompute_tree_invariant_for_addr_expr (*expr_p);
4970
 
4971
      /* If we re-built the ADDR_EXPR add a conversion to the original type
4972
         if required.  */
4973
      if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
4974
        *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
4975
 
4976
      break;
4977
    }
4978
 
4979
  return ret;
4980
}
4981
 
4982
/* Gimplify the operands of an ASM_EXPR.  Input operands should be a gimple
4983
   value; output operands should be a gimple lvalue.  */
4984
 
4985
static enum gimplify_status
4986
gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4987
{
4988
  tree expr;
4989
  int noutputs;
4990
  const char **oconstraints;
4991
  int i;
4992
  tree link;
4993
  const char *constraint;
4994
  bool allows_mem, allows_reg, is_inout;
4995
  enum gimplify_status ret, tret;
4996
  gimple stmt;
4997
  VEC(tree, gc) *inputs;
4998
  VEC(tree, gc) *outputs;
4999
  VEC(tree, gc) *clobbers;
5000
  VEC(tree, gc) *labels;
5001
  tree link_next;
5002
 
5003
  expr = *expr_p;
5004
  noutputs = list_length (ASM_OUTPUTS (expr));
5005
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
5006
 
5007
  inputs = outputs = clobbers = labels = NULL;
5008
 
5009
  ret = GS_ALL_DONE;
5010
  link_next = NULL_TREE;
5011
  for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
5012
    {
5013
      bool ok;
5014
      size_t constraint_len;
5015
 
5016
      link_next = TREE_CHAIN (link);
5017
 
5018
      oconstraints[i]
5019
        = constraint
5020
        = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5021
      constraint_len = strlen (constraint);
5022
      if (constraint_len == 0)
5023
        continue;
5024
 
5025
      ok = parse_output_constraint (&constraint, i, 0, 0,
5026
                                    &allows_mem, &allows_reg, &is_inout);
5027
      if (!ok)
5028
        {
5029
          ret = GS_ERROR;
5030
          is_inout = false;
5031
        }
5032
 
5033
      if (!allows_reg && allows_mem)
5034
        mark_addressable (TREE_VALUE (link));
5035
 
5036
      tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5037
                            is_inout ? is_gimple_min_lval : is_gimple_lvalue,
5038
                            fb_lvalue | fb_mayfail);
5039
      if (tret == GS_ERROR)
5040
        {
5041
          error ("invalid lvalue in asm output %d", i);
5042
          ret = tret;
5043
        }
5044
 
5045
      VEC_safe_push (tree, gc, outputs, link);
5046
      TREE_CHAIN (link) = NULL_TREE;
5047
 
5048
      if (is_inout)
5049
        {
5050
          /* An input/output operand.  To give the optimizers more
5051
             flexibility, split it into separate input and output
5052
             operands.  */
5053
          tree input;
5054
          char buf[10];
5055
 
5056
          /* Turn the in/out constraint into an output constraint.  */
5057
          char *p = xstrdup (constraint);
5058
          p[0] = '=';
5059
          TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
5060
 
5061
          /* And add a matching input constraint.  */
5062
          if (allows_reg)
5063
            {
5064
              sprintf (buf, "%d", i);
5065
 
5066
              /* If there are multiple alternatives in the constraint,
5067
                 handle each of them individually.  Those that allow register
5068
                 will be replaced with operand number, the others will stay
5069
                 unchanged.  */
5070
              if (strchr (p, ',') != NULL)
5071
                {
5072
                  size_t len = 0, buflen = strlen (buf);
5073
                  char *beg, *end, *str, *dst;
5074
 
5075
                  for (beg = p + 1;;)
5076
                    {
5077
                      end = strchr (beg, ',');
5078
                      if (end == NULL)
5079
                        end = strchr (beg, '\0');
5080
                      if ((size_t) (end - beg) < buflen)
5081
                        len += buflen + 1;
5082
                      else
5083
                        len += end - beg + 1;
5084
                      if (*end)
5085
                        beg = end + 1;
5086
                      else
5087
                        break;
5088
                    }
5089
 
5090
                  str = (char *) alloca (len);
5091
                  for (beg = p + 1, dst = str;;)
5092
                    {
5093
                      const char *tem;
5094
                      bool mem_p, reg_p, inout_p;
5095
 
5096
                      end = strchr (beg, ',');
5097
                      if (end)
5098
                        *end = '\0';
5099
                      beg[-1] = '=';
5100
                      tem = beg - 1;
5101
                      parse_output_constraint (&tem, i, 0, 0,
5102
                                               &mem_p, &reg_p, &inout_p);
5103
                      if (dst != str)
5104
                        *dst++ = ',';
5105
                      if (reg_p)
5106
                        {
5107
                          memcpy (dst, buf, buflen);
5108
                          dst += buflen;
5109
                        }
5110
                      else
5111
                        {
5112
                          if (end)
5113
                            len = end - beg;
5114
                          else
5115
                            len = strlen (beg);
5116
                          memcpy (dst, beg, len);
5117
                          dst += len;
5118
                        }
5119
                      if (end)
5120
                        beg = end + 1;
5121
                      else
5122
                        break;
5123
                    }
5124
                  *dst = '\0';
5125
                  input = build_string (dst - str, str);
5126
                }
5127
              else
5128
                input = build_string (strlen (buf), buf);
5129
            }
5130
          else
5131
            input = build_string (constraint_len - 1, constraint + 1);
5132
 
5133
          free (p);
5134
 
5135
          input = build_tree_list (build_tree_list (NULL_TREE, input),
5136
                                   unshare_expr (TREE_VALUE (link)));
5137
          ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
5138
        }
5139
    }
5140
 
5141
  link_next = NULL_TREE;
5142
  for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
5143
    {
5144
      link_next = TREE_CHAIN (link);
5145
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5146
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
5147
                              oconstraints, &allows_mem, &allows_reg);
5148
 
5149
      /* If we can't make copies, we can only accept memory.  */
5150
      if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
5151
        {
5152
          if (allows_mem)
5153
            allows_reg = 0;
5154
          else
5155
            {
5156
              error ("impossible constraint in %<asm%>");
5157
              error ("non-memory input %d must stay in memory", i);
5158
              return GS_ERROR;
5159
            }
5160
        }
5161
 
5162
      /* If the operand is a memory input, it should be an lvalue.  */
5163
      if (!allows_reg && allows_mem)
5164
        {
5165
          tree inputv = TREE_VALUE (link);
5166
          STRIP_NOPS (inputv);
5167
          if (TREE_CODE (inputv) == PREDECREMENT_EXPR
5168
              || TREE_CODE (inputv) == PREINCREMENT_EXPR
5169
              || TREE_CODE (inputv) == POSTDECREMENT_EXPR
5170
              || TREE_CODE (inputv) == POSTINCREMENT_EXPR)
5171
            TREE_VALUE (link) = error_mark_node;
5172
          tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5173
                                is_gimple_lvalue, fb_lvalue | fb_mayfail);
5174
          mark_addressable (TREE_VALUE (link));
5175
          if (tret == GS_ERROR)
5176
            {
5177
              if (EXPR_HAS_LOCATION (TREE_VALUE (link)))
5178
                input_location = EXPR_LOCATION (TREE_VALUE (link));
5179
              error ("memory input %d is not directly addressable", i);
5180
              ret = tret;
5181
            }
5182
        }
5183
      else
5184
        {
5185
          tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5186
                                is_gimple_asm_val, fb_rvalue);
5187
          if (tret == GS_ERROR)
5188
            ret = tret;
5189
        }
5190
 
5191
      TREE_CHAIN (link) = NULL_TREE;
5192
      VEC_safe_push (tree, gc, inputs, link);
5193
    }
5194
 
5195
  for (link = ASM_CLOBBERS (expr); link; ++i, link = TREE_CHAIN (link))
5196
    VEC_safe_push (tree, gc, clobbers, link);
5197
 
5198
  for (link = ASM_LABELS (expr); link; ++i, link = TREE_CHAIN (link))
5199
    VEC_safe_push (tree, gc, labels, link);
5200
 
5201
  /* Do not add ASMs with errors to the gimple IL stream.  */
5202
  if (ret != GS_ERROR)
5203
    {
5204
      stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
5205
                                   inputs, outputs, clobbers, labels);
5206
 
5207
      gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr));
5208
      gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
5209
 
5210
      gimplify_seq_add_stmt (pre_p, stmt);
5211
    }
5212
 
5213
  return ret;
5214
}
5215
 
5216
/* Gimplify a CLEANUP_POINT_EXPR.  Currently this works by adding
5217
   GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
5218
   gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
5219
   return to this function.
5220
 
5221
   FIXME should we complexify the prequeue handling instead?  Or use flags
5222
   for all the cleanups and let the optimizer tighten them up?  The current
5223
   code seems pretty fragile; it will break on a cleanup within any
5224
   non-conditional nesting.  But any such nesting would be broken, anyway;
5225
   we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
5226
   and continues out of it.  We can do that at the RTL level, though, so
5227
   having an optimizer to tighten up try/finally regions would be a Good
5228
   Thing.  */
5229
 
5230
static enum gimplify_status
5231
gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
5232
{
5233
  gimple_stmt_iterator iter;
5234
  gimple_seq body_sequence = NULL;
5235
 
5236
  tree temp = voidify_wrapper_expr (*expr_p, NULL);
5237
 
5238
  /* We only care about the number of conditions between the innermost
5239
     CLEANUP_POINT_EXPR and the cleanup.  So save and reset the count and
5240
     any cleanups collected outside the CLEANUP_POINT_EXPR.  */
5241
  int old_conds = gimplify_ctxp->conditions;
5242
  gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
5243
  bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
5244
  gimplify_ctxp->conditions = 0;
5245
  gimplify_ctxp->conditional_cleanups = NULL;
5246
  gimplify_ctxp->in_cleanup_point_expr = true;
5247
 
5248
  gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
5249
 
5250
  gimplify_ctxp->conditions = old_conds;
5251
  gimplify_ctxp->conditional_cleanups = old_cleanups;
5252
  gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
5253
 
5254
  for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
5255
    {
5256
      gimple wce = gsi_stmt (iter);
5257
 
5258
      if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
5259
        {
5260
          if (gsi_one_before_end_p (iter))
5261
            {
5262
              /* Note that gsi_insert_seq_before and gsi_remove do not
5263
                 scan operands, unlike some other sequence mutators.  */
5264
              if (!gimple_wce_cleanup_eh_only (wce))
5265
                gsi_insert_seq_before_without_update (&iter,
5266
                                                      gimple_wce_cleanup (wce),
5267
                                                      GSI_SAME_STMT);
5268
              gsi_remove (&iter, true);
5269
              break;
5270
            }
5271
          else
5272
            {
5273
              gimple gtry;
5274
              gimple_seq seq;
5275
              enum gimple_try_flags kind;
5276
 
5277
              if (gimple_wce_cleanup_eh_only (wce))
5278
                kind = GIMPLE_TRY_CATCH;
5279
              else
5280
                kind = GIMPLE_TRY_FINALLY;
5281
              seq = gsi_split_seq_after (iter);
5282
 
5283
              gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
5284
              /* Do not use gsi_replace here, as it may scan operands.
5285
                 We want to do a simple structural modification only.  */
5286
              *gsi_stmt_ptr (&iter) = gtry;
5287
              iter = gsi_start (seq);
5288
            }
5289
        }
5290
      else
5291
        gsi_next (&iter);
5292
    }
5293
 
5294
  gimplify_seq_add_seq (pre_p, body_sequence);
5295
  if (temp)
5296
    {
5297
      *expr_p = temp;
5298
      return GS_OK;
5299
    }
5300
  else
5301
    {
5302
      *expr_p = NULL;
5303
      return GS_ALL_DONE;
5304
    }
5305
}
5306
 
5307
/* Insert a cleanup marker for gimplify_cleanup_point_expr.  CLEANUP
5308
   is the cleanup action required.  EH_ONLY is true if the cleanup should
5309
   only be executed if an exception is thrown, not on normal exit.  */
5310
 
5311
static void
5312
gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p)
5313
{
5314
  gimple wce;
5315
  gimple_seq cleanup_stmts = NULL;
5316
 
5317
  /* Errors can result in improperly nested cleanups.  Which results in
5318
     confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR.  */
5319
  if (seen_error ())
5320
    return;
5321
 
5322
  if (gimple_conditional_context ())
5323
    {
5324
      /* If we're in a conditional context, this is more complex.  We only
5325
         want to run the cleanup if we actually ran the initialization that
5326
         necessitates it, but we want to run it after the end of the
5327
         conditional context.  So we wrap the try/finally around the
5328
         condition and use a flag to determine whether or not to actually
5329
         run the destructor.  Thus
5330
 
5331
           test ? f(A()) : 0
5332
 
5333
         becomes (approximately)
5334
 
5335
           flag = 0;
5336
           try {
5337
             if (test) { A::A(temp); flag = 1; val = f(temp); }
5338
             else { val = 0; }
5339
           } finally {
5340
             if (flag) A::~A(temp);
5341
           }
5342
           val
5343
      */
5344
      tree flag = create_tmp_var (boolean_type_node, "cleanup");
5345
      gimple ffalse = gimple_build_assign (flag, boolean_false_node);
5346
      gimple ftrue = gimple_build_assign (flag, boolean_true_node);
5347
 
5348
      cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
5349
      gimplify_stmt (&cleanup, &cleanup_stmts);
5350
      wce = gimple_build_wce (cleanup_stmts);
5351
 
5352
      gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
5353
      gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
5354
      gimplify_seq_add_stmt (pre_p, ftrue);
5355
 
5356
      /* Because of this manipulation, and the EH edges that jump
5357
         threading cannot redirect, the temporary (VAR) will appear
5358
         to be used uninitialized.  Don't warn.  */
5359
      TREE_NO_WARNING (var) = 1;
5360
    }
5361
  else
5362
    {
5363
      gimplify_stmt (&cleanup, &cleanup_stmts);
5364
      wce = gimple_build_wce (cleanup_stmts);
5365
      gimple_wce_set_cleanup_eh_only (wce, eh_only);
5366
      gimplify_seq_add_stmt (pre_p, wce);
5367
    }
5368
}
5369
 
5370
/* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR.  */
5371
 
5372
static enum gimplify_status
5373
gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5374
{
5375
  tree targ = *expr_p;
5376
  tree temp = TARGET_EXPR_SLOT (targ);
5377
  tree init = TARGET_EXPR_INITIAL (targ);
5378
  enum gimplify_status ret;
5379
 
5380
  if (init)
5381
    {
5382
      tree cleanup = NULL_TREE;
5383
 
5384
      /* TARGET_EXPR temps aren't part of the enclosing block, so add it
5385
         to the temps list.  Handle also variable length TARGET_EXPRs.  */
5386
      if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
5387
        {
5388
          if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
5389
            gimplify_type_sizes (TREE_TYPE (temp), pre_p);
5390
          gimplify_vla_decl (temp, pre_p);
5391
        }
5392
      else
5393
        gimple_add_tmp_var (temp);
5394
 
5395
      /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
5396
         expression is supposed to initialize the slot.  */
5397
      if (VOID_TYPE_P (TREE_TYPE (init)))
5398
        ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5399
      else
5400
        {
5401
          tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
5402
          init = init_expr;
5403
          ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5404
          init = NULL;
5405
          ggc_free (init_expr);
5406
        }
5407
      if (ret == GS_ERROR)
5408
        {
5409
          /* PR c++/28266 Make sure this is expanded only once. */
5410
          TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5411
          return GS_ERROR;
5412
        }
5413
      if (init)
5414
        gimplify_and_add (init, pre_p);
5415
 
5416
      /* If needed, push the cleanup for the temp.  */
5417
      if (TARGET_EXPR_CLEANUP (targ))
5418
        {
5419
          if (CLEANUP_EH_ONLY (targ))
5420
            gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
5421
                                 CLEANUP_EH_ONLY (targ), pre_p);
5422
          else
5423
            cleanup = TARGET_EXPR_CLEANUP (targ);
5424
        }
5425
 
5426
      /* Add a clobber for the temporary going out of scope, like
5427
         gimplify_bind_expr.  */
5428
      if (gimplify_ctxp->in_cleanup_point_expr
5429
          && needs_to_live_in_memory (temp))
5430
        {
5431
          tree clobber = build_constructor (TREE_TYPE (temp), NULL);
5432
          TREE_THIS_VOLATILE (clobber) = true;
5433
          clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
5434
          if (cleanup)
5435
            cleanup = build2 (COMPOUND_EXPR, void_type_node, cleanup,
5436
                              clobber);
5437
          else
5438
            cleanup = clobber;
5439
        }
5440
 
5441
      if (cleanup)
5442
        gimple_push_cleanup (temp, cleanup, false, pre_p);
5443
 
5444
      /* Only expand this once.  */
5445
      TREE_OPERAND (targ, 3) = init;
5446
      TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5447
    }
5448
  else
5449
    /* We should have expanded this before.  */
5450
    gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
5451
 
5452
  *expr_p = temp;
5453
  return GS_OK;
5454
}
5455
 
5456
/* Gimplification of expression trees.  */
5457
 
5458
/* Gimplify an expression which appears at statement context.  The
5459
   corresponding GIMPLE statements are added to *SEQ_P.  If *SEQ_P is
5460
   NULL, a new sequence is allocated.
5461
 
5462
   Return true if we actually added a statement to the queue.  */
5463
 
5464
bool
5465
gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
5466
{
5467
  gimple_seq_node last;
5468
 
5469
  if (!*seq_p)
5470
    *seq_p = gimple_seq_alloc ();
5471
 
5472
  last = gimple_seq_last (*seq_p);
5473
  gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
5474
  return last != gimple_seq_last (*seq_p);
5475
}
5476
 
5477
/* Add FIRSTPRIVATE entries for DECL in the OpenMP the surrounding parallels
5478
   to CTX.  If entries already exist, force them to be some flavor of private.
5479
   If there is no enclosing parallel, do nothing.  */
5480
 
5481
void
5482
omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
5483
{
5484
  splay_tree_node n;
5485
 
5486
  if (decl == NULL || !DECL_P (decl))
5487
    return;
5488
 
5489
  do
5490
    {
5491
      n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5492
      if (n != NULL)
5493
        {
5494
          if (n->value & GOVD_SHARED)
5495
            n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
5496
          else
5497
            return;
5498
        }
5499
      else if (ctx->region_type != ORT_WORKSHARE)
5500
        omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
5501
 
5502
      ctx = ctx->outer_context;
5503
    }
5504
  while (ctx);
5505
}
5506
 
5507
/* Similarly for each of the type sizes of TYPE.  */
5508
 
5509
static void
5510
omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
5511
{
5512
  if (type == NULL || type == error_mark_node)
5513
    return;
5514
  type = TYPE_MAIN_VARIANT (type);
5515
 
5516
  if (pointer_set_insert (ctx->privatized_types, type))
5517
    return;
5518
 
5519
  switch (TREE_CODE (type))
5520
    {
5521
    case INTEGER_TYPE:
5522
    case ENUMERAL_TYPE:
5523
    case BOOLEAN_TYPE:
5524
    case REAL_TYPE:
5525
    case FIXED_POINT_TYPE:
5526
      omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
5527
      omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
5528
      break;
5529
 
5530
    case ARRAY_TYPE:
5531
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5532
      omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
5533
      break;
5534
 
5535
    case RECORD_TYPE:
5536
    case UNION_TYPE:
5537
    case QUAL_UNION_TYPE:
5538
      {
5539
        tree field;
5540
        for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
5541
          if (TREE_CODE (field) == FIELD_DECL)
5542
            {
5543
              omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
5544
              omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
5545
            }
5546
      }
5547
      break;
5548
 
5549
    case POINTER_TYPE:
5550
    case REFERENCE_TYPE:
5551
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5552
      break;
5553
 
5554
    default:
5555
      break;
5556
    }
5557
 
5558
  omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
5559
  omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
5560
  lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
5561
}
5562
 
5563
/* Add an entry for DECL in the OpenMP context CTX with FLAGS.  */
5564
 
5565
static void
5566
omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
5567
{
5568
  splay_tree_node n;
5569
  unsigned int nflags;
5570
  tree t;
5571
 
5572
  if (error_operand_p (decl))
5573
    return;
5574
 
5575
  /* Never elide decls whose type has TREE_ADDRESSABLE set.  This means
5576
     there are constructors involved somewhere.  */
5577
  if (TREE_ADDRESSABLE (TREE_TYPE (decl))
5578
      || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
5579
    flags |= GOVD_SEEN;
5580
 
5581
  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5582
  if (n != NULL)
5583
    {
5584
      /* We shouldn't be re-adding the decl with the same data
5585
         sharing class.  */
5586
      gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
5587
      /* The only combination of data sharing classes we should see is
5588
         FIRSTPRIVATE and LASTPRIVATE.  */
5589
      nflags = n->value | flags;
5590
      gcc_assert ((nflags & GOVD_DATA_SHARE_CLASS)
5591
                  == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE));
5592
      n->value = nflags;
5593
      return;
5594
    }
5595
 
5596
  /* When adding a variable-sized variable, we have to handle all sorts
5597
     of additional bits of data: the pointer replacement variable, and
5598
     the parameters of the type.  */
5599
  if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
5600
    {
5601
      /* Add the pointer replacement variable as PRIVATE if the variable
5602
         replacement is private, else FIRSTPRIVATE since we'll need the
5603
         address of the original variable either for SHARED, or for the
5604
         copy into or out of the context.  */
5605
      if (!(flags & GOVD_LOCAL))
5606
        {
5607
          nflags = flags & GOVD_PRIVATE ? GOVD_PRIVATE : GOVD_FIRSTPRIVATE;
5608
          nflags |= flags & GOVD_SEEN;
5609
          t = DECL_VALUE_EXPR (decl);
5610
          gcc_assert (TREE_CODE (t) == INDIRECT_REF);
5611
          t = TREE_OPERAND (t, 0);
5612
          gcc_assert (DECL_P (t));
5613
          omp_add_variable (ctx, t, nflags);
5614
        }
5615
 
5616
      /* Add all of the variable and type parameters (which should have
5617
         been gimplified to a formal temporary) as FIRSTPRIVATE.  */
5618
      omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
5619
      omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
5620
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5621
 
5622
      /* The variable-sized variable itself is never SHARED, only some form
5623
         of PRIVATE.  The sharing would take place via the pointer variable
5624
         which we remapped above.  */
5625
      if (flags & GOVD_SHARED)
5626
        flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE
5627
                | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
5628
 
5629
      /* We're going to make use of the TYPE_SIZE_UNIT at least in the
5630
         alloca statement we generate for the variable, so make sure it
5631
         is available.  This isn't automatically needed for the SHARED
5632
         case, since we won't be allocating local storage then.
5633
         For local variables TYPE_SIZE_UNIT might not be gimplified yet,
5634
         in this case omp_notice_variable will be called later
5635
         on when it is gimplified.  */
5636
      else if (! (flags & GOVD_LOCAL)
5637
               && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
5638
        omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
5639
    }
5640
  else if (lang_hooks.decls.omp_privatize_by_reference (decl))
5641
    {
5642
      gcc_assert ((flags & GOVD_LOCAL) == 0);
5643
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5644
 
5645
      /* Similar to the direct variable sized case above, we'll need the
5646
         size of references being privatized.  */
5647
      if ((flags & GOVD_SHARED) == 0)
5648
        {
5649
          t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
5650
          if (TREE_CODE (t) != INTEGER_CST)
5651
            omp_notice_variable (ctx, t, true);
5652
        }
5653
    }
5654
 
5655
  splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
5656
}
5657
 
5658
/* Notice a threadprivate variable DECL used in OpenMP context CTX.
5659
   This just prints out diagnostics about threadprivate variable uses
5660
   in untied tasks.  If DECL2 is non-NULL, prevent this warning
5661
   on that variable.  */
5662
 
5663
static bool
5664
omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
5665
                                   tree decl2)
5666
{
5667
  splay_tree_node n;
5668
 
5669
  if (ctx->region_type != ORT_UNTIED_TASK)
5670
    return false;
5671
  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5672
  if (n == NULL)
5673
    {
5674
      error ("threadprivate variable %qE used in untied task",
5675
             DECL_NAME (decl));
5676
      error_at (ctx->location, "enclosing task");
5677
      splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
5678
    }
5679
  if (decl2)
5680
    splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
5681
  return false;
5682
}
5683
 
5684
/* Record the fact that DECL was used within the OpenMP context CTX.
5685
   IN_CODE is true when real code uses DECL, and false when we should
5686
   merely emit default(none) errors.  Return true if DECL is going to
5687
   be remapped and thus DECL shouldn't be gimplified into its
5688
   DECL_VALUE_EXPR (if any).  */
5689
 
5690
static bool
5691
omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
5692
{
5693
  splay_tree_node n;
5694
  unsigned flags = in_code ? GOVD_SEEN : 0;
5695
  bool ret = false, shared;
5696
 
5697
  if (error_operand_p (decl))
5698
    return false;
5699
 
5700
  /* Threadprivate variables are predetermined.  */
5701
  if (is_global_var (decl))
5702
    {
5703
      if (DECL_THREAD_LOCAL_P (decl))
5704
        return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
5705
 
5706
      if (DECL_HAS_VALUE_EXPR_P (decl))
5707
        {
5708
          tree value = get_base_address (DECL_VALUE_EXPR (decl));
5709
 
5710
          if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
5711
            return omp_notice_threadprivate_variable (ctx, decl, value);
5712
        }
5713
    }
5714
 
5715
  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5716
  if (n == NULL)
5717
    {
5718
      enum omp_clause_default_kind default_kind, kind;
5719
      struct gimplify_omp_ctx *octx;
5720
 
5721
      if (ctx->region_type == ORT_WORKSHARE)
5722
        goto do_outer;
5723
 
5724
      /* ??? Some compiler-generated variables (like SAVE_EXPRs) could be
5725
         remapped firstprivate instead of shared.  To some extent this is
5726
         addressed in omp_firstprivatize_type_sizes, but not effectively.  */
5727
      default_kind = ctx->default_kind;
5728
      kind = lang_hooks.decls.omp_predetermined_sharing (decl);
5729
      if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
5730
        default_kind = kind;
5731
 
5732
      switch (default_kind)
5733
        {
5734
        case OMP_CLAUSE_DEFAULT_NONE:
5735
          error ("%qE not specified in enclosing parallel",
5736
                 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
5737
          if ((ctx->region_type & ORT_TASK) != 0)
5738
            error_at (ctx->location, "enclosing task");
5739
          else
5740
            error_at (ctx->location, "enclosing parallel");
5741
          /* FALLTHRU */
5742
        case OMP_CLAUSE_DEFAULT_SHARED:
5743
          flags |= GOVD_SHARED;
5744
          break;
5745
        case OMP_CLAUSE_DEFAULT_PRIVATE:
5746
          flags |= GOVD_PRIVATE;
5747
          break;
5748
        case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
5749
          flags |= GOVD_FIRSTPRIVATE;
5750
          break;
5751
        case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
5752
          /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED.  */
5753
          gcc_assert ((ctx->region_type & ORT_TASK) != 0);
5754
          if (ctx->outer_context)
5755
            omp_notice_variable (ctx->outer_context, decl, in_code);
5756
          for (octx = ctx->outer_context; octx; octx = octx->outer_context)
5757
            {
5758
              splay_tree_node n2;
5759
 
5760
              n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
5761
              if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
5762
                {
5763
                  flags |= GOVD_FIRSTPRIVATE;
5764
                  break;
5765
                }
5766
              if ((octx->region_type & ORT_PARALLEL) != 0)
5767
                break;
5768
            }
5769
          if (flags & GOVD_FIRSTPRIVATE)
5770
            break;
5771
          if (octx == NULL
5772
              && (TREE_CODE (decl) == PARM_DECL
5773
                  || (!is_global_var (decl)
5774
                      && DECL_CONTEXT (decl) == current_function_decl)))
5775
            {
5776
              flags |= GOVD_FIRSTPRIVATE;
5777
              break;
5778
            }
5779
          flags |= GOVD_SHARED;
5780
          break;
5781
        default:
5782
          gcc_unreachable ();
5783
        }
5784
 
5785
      if ((flags & GOVD_PRIVATE)
5786
          && lang_hooks.decls.omp_private_outer_ref (decl))
5787
        flags |= GOVD_PRIVATE_OUTER_REF;
5788
 
5789
      omp_add_variable (ctx, decl, flags);
5790
 
5791
      shared = (flags & GOVD_SHARED) != 0;
5792
      ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
5793
      goto do_outer;
5794
    }
5795
 
5796
  if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
5797
      && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
5798
      && DECL_SIZE (decl)
5799
      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
5800
    {
5801
      splay_tree_node n2;
5802
      tree t = DECL_VALUE_EXPR (decl);
5803
      gcc_assert (TREE_CODE (t) == INDIRECT_REF);
5804
      t = TREE_OPERAND (t, 0);
5805
      gcc_assert (DECL_P (t));
5806
      n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
5807
      n2->value |= GOVD_SEEN;
5808
    }
5809
 
5810
  shared = ((flags | n->value) & GOVD_SHARED) != 0;
5811
  ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
5812
 
5813
  /* If nothing changed, there's nothing left to do.  */
5814
  if ((n->value & flags) == flags)
5815
    return ret;
5816
  flags |= n->value;
5817
  n->value = flags;
5818
 
5819
 do_outer:
5820
  /* If the variable is private in the current context, then we don't
5821
     need to propagate anything to an outer context.  */
5822
  if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
5823
    return ret;
5824
  if (ctx->outer_context
5825
      && omp_notice_variable (ctx->outer_context, decl, in_code))
5826
    return true;
5827
  return ret;
5828
}
5829
 
5830
/* Verify that DECL is private within CTX.  If there's specific information
5831
   to the contrary in the innermost scope, generate an error.  */
5832
 
5833
static bool
5834
omp_is_private (struct gimplify_omp_ctx *ctx, tree decl)
5835
{
5836
  splay_tree_node n;
5837
 
5838
  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5839
  if (n != NULL)
5840
    {
5841
      if (n->value & GOVD_SHARED)
5842
        {
5843
          if (ctx == gimplify_omp_ctxp)
5844
            {
5845
              error ("iteration variable %qE should be private",
5846
                     DECL_NAME (decl));
5847
              n->value = GOVD_PRIVATE;
5848
              return true;
5849
            }
5850
          else
5851
            return false;
5852
        }
5853
      else if ((n->value & GOVD_EXPLICIT) != 0
5854
               && (ctx == gimplify_omp_ctxp
5855
                   || (ctx->region_type == ORT_COMBINED_PARALLEL
5856
                       && gimplify_omp_ctxp->outer_context == ctx)))
5857
        {
5858
          if ((n->value & GOVD_FIRSTPRIVATE) != 0)
5859
            error ("iteration variable %qE should not be firstprivate",
5860
                   DECL_NAME (decl));
5861
          else if ((n->value & GOVD_REDUCTION) != 0)
5862
            error ("iteration variable %qE should not be reduction",
5863
                   DECL_NAME (decl));
5864
        }
5865
      return (ctx == gimplify_omp_ctxp
5866
              || (ctx->region_type == ORT_COMBINED_PARALLEL
5867
                  && gimplify_omp_ctxp->outer_context == ctx));
5868
    }
5869
 
5870
  if (ctx->region_type != ORT_WORKSHARE)
5871
    return false;
5872
  else if (ctx->outer_context)
5873
    return omp_is_private (ctx->outer_context, decl);
5874
  return false;
5875
}
5876
 
5877
/* Return true if DECL is private within a parallel region
5878
   that binds to the current construct's context or in parallel
5879
   region's REDUCTION clause.  */
5880
 
5881
static bool
5882
omp_check_private (struct gimplify_omp_ctx *ctx, tree decl)
5883
{
5884
  splay_tree_node n;
5885
 
5886
  do
5887
    {
5888
      ctx = ctx->outer_context;
5889
      if (ctx == NULL)
5890
        return !(is_global_var (decl)
5891
                 /* References might be private, but might be shared too.  */
5892
                 || lang_hooks.decls.omp_privatize_by_reference (decl));
5893
 
5894
      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
5895
      if (n != NULL)
5896
        return (n->value & GOVD_SHARED) == 0;
5897
    }
5898
  while (ctx->region_type == ORT_WORKSHARE);
5899
  return false;
5900
}
5901
 
5902
/* Scan the OpenMP clauses in *LIST_P, installing mappings into a new
5903
   and previous omp contexts.  */
5904
 
5905
static void
5906
gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
5907
                           enum omp_region_type region_type)
5908
{
5909
  struct gimplify_omp_ctx *ctx, *outer_ctx;
5910
  struct gimplify_ctx gctx;
5911
  tree c;
5912
 
5913
  ctx = new_omp_context (region_type);
5914
  outer_ctx = ctx->outer_context;
5915
 
5916
  while ((c = *list_p) != NULL)
5917
    {
5918
      bool remove = false;
5919
      bool notice_outer = true;
5920
      const char *check_non_private = NULL;
5921
      unsigned int flags;
5922
      tree decl;
5923
 
5924
      switch (OMP_CLAUSE_CODE (c))
5925
        {
5926
        case OMP_CLAUSE_PRIVATE:
5927
          flags = GOVD_PRIVATE | GOVD_EXPLICIT;
5928
          if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
5929
            {
5930
              flags |= GOVD_PRIVATE_OUTER_REF;
5931
              OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
5932
            }
5933
          else
5934
            notice_outer = false;
5935
          goto do_add;
5936
        case OMP_CLAUSE_SHARED:
5937
          flags = GOVD_SHARED | GOVD_EXPLICIT;
5938
          goto do_add;
5939
        case OMP_CLAUSE_FIRSTPRIVATE:
5940
          flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
5941
          check_non_private = "firstprivate";
5942
          goto do_add;
5943
        case OMP_CLAUSE_LASTPRIVATE:
5944
          flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
5945
          check_non_private = "lastprivate";
5946
          goto do_add;
5947
        case OMP_CLAUSE_REDUCTION:
5948
          flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
5949
          check_non_private = "reduction";
5950
          goto do_add;
5951
 
5952
        do_add:
5953
          decl = OMP_CLAUSE_DECL (c);
5954
          if (error_operand_p (decl))
5955
            {
5956
              remove = true;
5957
              break;
5958
            }
5959
          omp_add_variable (ctx, decl, flags);
5960
          if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5961
              && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5962
            {
5963
              omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
5964
                                GOVD_LOCAL | GOVD_SEEN);
5965
              gimplify_omp_ctxp = ctx;
5966
              push_gimplify_context (&gctx);
5967
 
5968
              OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = gimple_seq_alloc ();
5969
              OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = gimple_seq_alloc ();
5970
 
5971
              gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
5972
                                &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
5973
              pop_gimplify_context
5974
                (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
5975
              push_gimplify_context (&gctx);
5976
              gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
5977
                                &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5978
              pop_gimplify_context
5979
                (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
5980
              OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
5981
              OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
5982
 
5983
              gimplify_omp_ctxp = outer_ctx;
5984
            }
5985
          else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5986
                   && OMP_CLAUSE_LASTPRIVATE_STMT (c))
5987
            {
5988
              gimplify_omp_ctxp = ctx;
5989
              push_gimplify_context (&gctx);
5990
              if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
5991
                {
5992
                  tree bind = build3 (BIND_EXPR, void_type_node, NULL,
5993
                                      NULL, NULL);
5994
                  TREE_SIDE_EFFECTS (bind) = 1;
5995
                  BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
5996
                  OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
5997
                }
5998
              gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
5999
                                &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
6000
              pop_gimplify_context
6001
                (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
6002
              OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
6003
 
6004
              gimplify_omp_ctxp = outer_ctx;
6005
            }
6006
          if (notice_outer)
6007
            goto do_notice;
6008
          break;
6009
 
6010
        case OMP_CLAUSE_COPYIN:
6011
        case OMP_CLAUSE_COPYPRIVATE:
6012
          decl = OMP_CLAUSE_DECL (c);
6013
          if (error_operand_p (decl))
6014
            {
6015
              remove = true;
6016
              break;
6017
            }
6018
        do_notice:
6019
          if (outer_ctx)
6020
            omp_notice_variable (outer_ctx, decl, true);
6021
          if (check_non_private
6022
              && region_type == ORT_WORKSHARE
6023
              && omp_check_private (ctx, decl))
6024
            {
6025
              error ("%s variable %qE is private in outer context",
6026
                     check_non_private, DECL_NAME (decl));
6027
              remove = true;
6028
            }
6029
          break;
6030
 
6031
        case OMP_CLAUSE_FINAL:
6032
        case OMP_CLAUSE_IF:
6033
          OMP_CLAUSE_OPERAND (c, 0)
6034
            = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
6035
          /* Fall through.  */
6036
 
6037
        case OMP_CLAUSE_SCHEDULE:
6038
        case OMP_CLAUSE_NUM_THREADS:
6039
          if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
6040
                             is_gimple_val, fb_rvalue) == GS_ERROR)
6041
              remove = true;
6042
          break;
6043
 
6044
        case OMP_CLAUSE_NOWAIT:
6045
        case OMP_CLAUSE_ORDERED:
6046
        case OMP_CLAUSE_UNTIED:
6047
        case OMP_CLAUSE_COLLAPSE:
6048
        case OMP_CLAUSE_MERGEABLE:
6049
          break;
6050
 
6051
        case OMP_CLAUSE_DEFAULT:
6052
          ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
6053
          break;
6054
 
6055
        default:
6056
          gcc_unreachable ();
6057
        }
6058
 
6059
      if (remove)
6060
        *list_p = OMP_CLAUSE_CHAIN (c);
6061
      else
6062
        list_p = &OMP_CLAUSE_CHAIN (c);
6063
    }
6064
 
6065
  gimplify_omp_ctxp = ctx;
6066
}
6067
 
6068
/* For all variables that were not actually used within the context,
6069
   remove PRIVATE, SHARED, and FIRSTPRIVATE clauses.  */
6070
 
6071
static int
6072
gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
6073
{
6074
  tree *list_p = (tree *) data;
6075
  tree decl = (tree) n->key;
6076
  unsigned flags = n->value;
6077
  enum omp_clause_code code;
6078
  tree clause;
6079
  bool private_debug;
6080
 
6081
  if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
6082
    return 0;
6083
  if ((flags & GOVD_SEEN) == 0)
6084
    return 0;
6085
  if (flags & GOVD_DEBUG_PRIVATE)
6086
    {
6087
      gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_PRIVATE);
6088
      private_debug = true;
6089
    }
6090
  else
6091
    private_debug
6092
      = lang_hooks.decls.omp_private_debug_clause (decl,
6093
                                                   !!(flags & GOVD_SHARED));
6094
  if (private_debug)
6095
    code = OMP_CLAUSE_PRIVATE;
6096
  else if (flags & GOVD_SHARED)
6097
    {
6098
      if (is_global_var (decl))
6099
        {
6100
          struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
6101
          while (ctx != NULL)
6102
            {
6103
              splay_tree_node on
6104
                = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6105
              if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
6106
                                      | GOVD_PRIVATE | GOVD_REDUCTION)) != 0)
6107
                break;
6108
              ctx = ctx->outer_context;
6109
            }
6110
          if (ctx == NULL)
6111
            return 0;
6112
        }
6113
      code = OMP_CLAUSE_SHARED;
6114
    }
6115
  else if (flags & GOVD_PRIVATE)
6116
    code = OMP_CLAUSE_PRIVATE;
6117
  else if (flags & GOVD_FIRSTPRIVATE)
6118
    code = OMP_CLAUSE_FIRSTPRIVATE;
6119
  else
6120
    gcc_unreachable ();
6121
 
6122
  clause = build_omp_clause (input_location, code);
6123
  OMP_CLAUSE_DECL (clause) = decl;
6124
  OMP_CLAUSE_CHAIN (clause) = *list_p;
6125
  if (private_debug)
6126
    OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
6127
  else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
6128
    OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
6129
  *list_p = clause;
6130
  lang_hooks.decls.omp_finish_clause (clause);
6131
 
6132
  return 0;
6133
}
6134
 
6135
static void
6136
gimplify_adjust_omp_clauses (tree *list_p)
6137
{
6138
  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
6139
  tree c, decl;
6140
 
6141
  while ((c = *list_p) != NULL)
6142
    {
6143
      splay_tree_node n;
6144
      bool remove = false;
6145
 
6146
      switch (OMP_CLAUSE_CODE (c))
6147
        {
6148
        case OMP_CLAUSE_PRIVATE:
6149
        case OMP_CLAUSE_SHARED:
6150
        case OMP_CLAUSE_FIRSTPRIVATE:
6151
          decl = OMP_CLAUSE_DECL (c);
6152
          n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6153
          remove = !(n->value & GOVD_SEEN);
6154
          if (! remove)
6155
            {
6156
              bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
6157
              if ((n->value & GOVD_DEBUG_PRIVATE)
6158
                  || lang_hooks.decls.omp_private_debug_clause (decl, shared))
6159
                {
6160
                  gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
6161
                              || ((n->value & GOVD_DATA_SHARE_CLASS)
6162
                                  == GOVD_PRIVATE));
6163
                  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
6164
                  OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
6165
                }
6166
            }
6167
          break;
6168
 
6169
        case OMP_CLAUSE_LASTPRIVATE:
6170
          /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
6171
             accurately reflect the presence of a FIRSTPRIVATE clause.  */
6172
          decl = OMP_CLAUSE_DECL (c);
6173
          n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6174
          OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
6175
            = (n->value & GOVD_FIRSTPRIVATE) != 0;
6176
          break;
6177
 
6178
        case OMP_CLAUSE_REDUCTION:
6179
        case OMP_CLAUSE_COPYIN:
6180
        case OMP_CLAUSE_COPYPRIVATE:
6181
        case OMP_CLAUSE_IF:
6182
        case OMP_CLAUSE_NUM_THREADS:
6183
        case OMP_CLAUSE_SCHEDULE:
6184
        case OMP_CLAUSE_NOWAIT:
6185
        case OMP_CLAUSE_ORDERED:
6186
        case OMP_CLAUSE_DEFAULT:
6187
        case OMP_CLAUSE_UNTIED:
6188
        case OMP_CLAUSE_COLLAPSE:
6189
        case OMP_CLAUSE_FINAL:
6190
        case OMP_CLAUSE_MERGEABLE:
6191
          break;
6192
 
6193
        default:
6194
          gcc_unreachable ();
6195
        }
6196
 
6197
      if (remove)
6198
        *list_p = OMP_CLAUSE_CHAIN (c);
6199
      else
6200
        list_p = &OMP_CLAUSE_CHAIN (c);
6201
    }
6202
 
6203
  /* Add in any implicit data sharing.  */
6204
  splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, list_p);
6205
 
6206
  gimplify_omp_ctxp = ctx->outer_context;
6207
  delete_omp_context (ctx);
6208
}
6209
 
6210
/* Gimplify the contents of an OMP_PARALLEL statement.  This involves
6211
   gimplification of the body, as well as scanning the body for used
6212
   variables.  We need to do this scan now, because variable-sized
6213
   decls will be decomposed during gimplification.  */
6214
 
6215
static void
6216
gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
6217
{
6218
  tree expr = *expr_p;
6219
  gimple g;
6220
  gimple_seq body = NULL;
6221
  struct gimplify_ctx gctx;
6222
 
6223
  gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
6224
                             OMP_PARALLEL_COMBINED (expr)
6225
                             ? ORT_COMBINED_PARALLEL
6226
                             : ORT_PARALLEL);
6227
 
6228
  push_gimplify_context (&gctx);
6229
 
6230
  g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
6231
  if (gimple_code (g) == GIMPLE_BIND)
6232
    pop_gimplify_context (g);
6233
  else
6234
    pop_gimplify_context (NULL);
6235
 
6236
  gimplify_adjust_omp_clauses (&OMP_PARALLEL_CLAUSES (expr));
6237
 
6238
  g = gimple_build_omp_parallel (body,
6239
                                 OMP_PARALLEL_CLAUSES (expr),
6240
                                 NULL_TREE, NULL_TREE);
6241
  if (OMP_PARALLEL_COMBINED (expr))
6242
    gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
6243
  gimplify_seq_add_stmt (pre_p, g);
6244
  *expr_p = NULL_TREE;
6245
}
6246
 
6247
/* Gimplify the contents of an OMP_TASK statement.  This involves
6248
   gimplification of the body, as well as scanning the body for used
6249
   variables.  We need to do this scan now, because variable-sized
6250
   decls will be decomposed during gimplification.  */
6251
 
6252
static void
6253
gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
6254
{
6255
  tree expr = *expr_p;
6256
  gimple g;
6257
  gimple_seq body = NULL;
6258
  struct gimplify_ctx gctx;
6259
 
6260
  gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
6261
                             find_omp_clause (OMP_TASK_CLAUSES (expr),
6262
                                              OMP_CLAUSE_UNTIED)
6263
                             ? ORT_UNTIED_TASK : ORT_TASK);
6264
 
6265
  push_gimplify_context (&gctx);
6266
 
6267
  g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
6268
  if (gimple_code (g) == GIMPLE_BIND)
6269
    pop_gimplify_context (g);
6270
  else
6271
    pop_gimplify_context (NULL);
6272
 
6273
  gimplify_adjust_omp_clauses (&OMP_TASK_CLAUSES (expr));
6274
 
6275
  g = gimple_build_omp_task (body,
6276
                             OMP_TASK_CLAUSES (expr),
6277
                             NULL_TREE, NULL_TREE,
6278
                             NULL_TREE, NULL_TREE, NULL_TREE);
6279
  gimplify_seq_add_stmt (pre_p, g);
6280
  *expr_p = NULL_TREE;
6281
}
6282
 
6283
/* Gimplify the gross structure of an OMP_FOR statement.  */
6284
 
6285
static enum gimplify_status
6286
gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
6287
{
6288
  tree for_stmt, decl, var, t;
6289
  enum gimplify_status ret = GS_ALL_DONE;
6290
  enum gimplify_status tret;
6291
  gimple gfor;
6292
  gimple_seq for_body, for_pre_body;
6293
  int i;
6294
 
6295
  for_stmt = *expr_p;
6296
 
6297
  gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p,
6298
                             ORT_WORKSHARE);
6299
 
6300
  /* Handle OMP_FOR_INIT.  */
6301
  for_pre_body = NULL;
6302
  gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
6303
  OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
6304
 
6305
  for_body = gimple_seq_alloc ();
6306
  gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
6307
              == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
6308
  gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
6309
              == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
6310
  for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
6311
    {
6312
      t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
6313
      gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
6314
      decl = TREE_OPERAND (t, 0);
6315
      gcc_assert (DECL_P (decl));
6316
      gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
6317
                  || POINTER_TYPE_P (TREE_TYPE (decl)));
6318
 
6319
      /* Make sure the iteration variable is private.  */
6320
      if (omp_is_private (gimplify_omp_ctxp, decl))
6321
        omp_notice_variable (gimplify_omp_ctxp, decl, true);
6322
      else
6323
        omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
6324
 
6325
      /* If DECL is not a gimple register, create a temporary variable to act
6326
         as an iteration counter.  This is valid, since DECL cannot be
6327
         modified in the body of the loop.  */
6328
      if (!is_gimple_reg (decl))
6329
        {
6330
          var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
6331
          TREE_OPERAND (t, 0) = var;
6332
 
6333
          gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
6334
 
6335
          omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
6336
        }
6337
      else
6338
        var = decl;
6339
 
6340
      tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
6341
                            is_gimple_val, fb_rvalue);
6342
      ret = MIN (ret, tret);
6343
      if (ret == GS_ERROR)
6344
        return ret;
6345
 
6346
      /* Handle OMP_FOR_COND.  */
6347
      t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
6348
      gcc_assert (COMPARISON_CLASS_P (t));
6349
      gcc_assert (TREE_OPERAND (t, 0) == decl);
6350
 
6351
      tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
6352
                            is_gimple_val, fb_rvalue);
6353
      ret = MIN (ret, tret);
6354
 
6355
      /* Handle OMP_FOR_INCR.  */
6356
      t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
6357
      switch (TREE_CODE (t))
6358
        {
6359
        case PREINCREMENT_EXPR:
6360
        case POSTINCREMENT_EXPR:
6361
          t = build_int_cst (TREE_TYPE (decl), 1);
6362
          t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
6363
          t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
6364
          TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
6365
          break;
6366
 
6367
        case PREDECREMENT_EXPR:
6368
        case POSTDECREMENT_EXPR:
6369
          t = build_int_cst (TREE_TYPE (decl), -1);
6370
          t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
6371
          t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
6372
          TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
6373
          break;
6374
 
6375
        case MODIFY_EXPR:
6376
          gcc_assert (TREE_OPERAND (t, 0) == decl);
6377
          TREE_OPERAND (t, 0) = var;
6378
 
6379
          t = TREE_OPERAND (t, 1);
6380
          switch (TREE_CODE (t))
6381
            {
6382
            case PLUS_EXPR:
6383
              if (TREE_OPERAND (t, 1) == decl)
6384
                {
6385
                  TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
6386
                  TREE_OPERAND (t, 0) = var;
6387
                  break;
6388
                }
6389
 
6390
              /* Fallthru.  */
6391
            case MINUS_EXPR:
6392
            case POINTER_PLUS_EXPR:
6393
              gcc_assert (TREE_OPERAND (t, 0) == decl);
6394
              TREE_OPERAND (t, 0) = var;
6395
              break;
6396
            default:
6397
              gcc_unreachable ();
6398
            }
6399
 
6400
          tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
6401
                                is_gimple_val, fb_rvalue);
6402
          ret = MIN (ret, tret);
6403
          break;
6404
 
6405
        default:
6406
          gcc_unreachable ();
6407
        }
6408
 
6409
      if (var != decl || TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)
6410
        {
6411
          tree c;
6412
          for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
6413
            if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6414
                && OMP_CLAUSE_DECL (c) == decl
6415
                && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
6416
              {
6417
                t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
6418
                gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
6419
                gcc_assert (TREE_OPERAND (t, 0) == var);
6420
                t = TREE_OPERAND (t, 1);
6421
                gcc_assert (TREE_CODE (t) == PLUS_EXPR
6422
                            || TREE_CODE (t) == MINUS_EXPR
6423
                            || TREE_CODE (t) == POINTER_PLUS_EXPR);
6424
                gcc_assert (TREE_OPERAND (t, 0) == var);
6425
                t = build2 (TREE_CODE (t), TREE_TYPE (decl), decl,
6426
                            TREE_OPERAND (t, 1));
6427
                gimplify_assign (decl, t,
6428
                                 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
6429
            }
6430
        }
6431
    }
6432
 
6433
  gimplify_and_add (OMP_FOR_BODY (for_stmt), &for_body);
6434
 
6435
  gimplify_adjust_omp_clauses (&OMP_FOR_CLAUSES (for_stmt));
6436
 
6437
  gfor = gimple_build_omp_for (for_body, OMP_FOR_CLAUSES (for_stmt),
6438
                               TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
6439
                               for_pre_body);
6440
 
6441
  for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
6442
    {
6443
      t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
6444
      gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
6445
      gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
6446
      t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
6447
      gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
6448
      gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
6449
      t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
6450
      gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
6451
    }
6452
 
6453
  gimplify_seq_add_stmt (pre_p, gfor);
6454
  return ret == GS_ALL_DONE ? GS_ALL_DONE : GS_ERROR;
6455
}
6456
 
6457
/* Gimplify the gross structure of other OpenMP worksharing constructs.
6458
   In particular, OMP_SECTIONS and OMP_SINGLE.  */
6459
 
6460
static void
6461
gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
6462
{
6463
  tree expr = *expr_p;
6464
  gimple stmt;
6465
  gimple_seq body = NULL;
6466
 
6467
  gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ORT_WORKSHARE);
6468
  gimplify_and_add (OMP_BODY (expr), &body);
6469
  gimplify_adjust_omp_clauses (&OMP_CLAUSES (expr));
6470
 
6471
  if (TREE_CODE (expr) == OMP_SECTIONS)
6472
    stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
6473
  else if (TREE_CODE (expr) == OMP_SINGLE)
6474
    stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
6475
  else
6476
    gcc_unreachable ();
6477
 
6478
  gimplify_seq_add_stmt (pre_p, stmt);
6479
}
6480
 
6481
/* A subroutine of gimplify_omp_atomic.  The front end is supposed to have
6482
   stabilized the lhs of the atomic operation as *ADDR.  Return true if
6483
   EXPR is this stabilized form.  */
6484
 
6485
static bool
6486
goa_lhs_expr_p (tree expr, tree addr)
6487
{
6488
  /* Also include casts to other type variants.  The C front end is fond
6489
     of adding these for e.g. volatile variables.  This is like
6490
     STRIP_TYPE_NOPS but includes the main variant lookup.  */
6491
  STRIP_USELESS_TYPE_CONVERSION (expr);
6492
 
6493
  if (TREE_CODE (expr) == INDIRECT_REF)
6494
    {
6495
      expr = TREE_OPERAND (expr, 0);
6496
      while (expr != addr
6497
             && (CONVERT_EXPR_P (expr)
6498
                 || TREE_CODE (expr) == NON_LVALUE_EXPR)
6499
             && TREE_CODE (expr) == TREE_CODE (addr)
6500
             && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
6501
        {
6502
          expr = TREE_OPERAND (expr, 0);
6503
          addr = TREE_OPERAND (addr, 0);
6504
        }
6505
      if (expr == addr)
6506
        return true;
6507
      return (TREE_CODE (addr) == ADDR_EXPR
6508
              && TREE_CODE (expr) == ADDR_EXPR
6509
              && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
6510
    }
6511
  if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
6512
    return true;
6513
  return false;
6514
}
6515
 
6516
/* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR.  If an
6517
   expression does not involve the lhs, evaluate it into a temporary.
6518
   Return 1 if the lhs appeared as a subexpression, 0 if it did not,
6519
   or -1 if an error was encountered.  */
6520
 
6521
static int
6522
goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
6523
                    tree lhs_var)
6524
{
6525
  tree expr = *expr_p;
6526
  int saw_lhs;
6527
 
6528
  if (goa_lhs_expr_p (expr, lhs_addr))
6529
    {
6530
      *expr_p = lhs_var;
6531
      return 1;
6532
    }
6533
  if (is_gimple_val (expr))
6534
    return 0;
6535
 
6536
  saw_lhs = 0;
6537
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
6538
    {
6539
    case tcc_binary:
6540
    case tcc_comparison:
6541
      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
6542
                                     lhs_var);
6543
    case tcc_unary:
6544
      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
6545
                                     lhs_var);
6546
      break;
6547
    case tcc_expression:
6548
      switch (TREE_CODE (expr))
6549
        {
6550
        case TRUTH_ANDIF_EXPR:
6551
        case TRUTH_ORIF_EXPR:
6552
        case TRUTH_AND_EXPR:
6553
        case TRUTH_OR_EXPR:
6554
        case TRUTH_XOR_EXPR:
6555
          saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
6556
                                         lhs_addr, lhs_var);
6557
        case TRUTH_NOT_EXPR:
6558
          saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
6559
                                         lhs_addr, lhs_var);
6560
          break;
6561
        case COMPOUND_EXPR:
6562
          /* Break out any preevaluations from cp_build_modify_expr.  */
6563
          for (; TREE_CODE (expr) == COMPOUND_EXPR;
6564
               expr = TREE_OPERAND (expr, 1))
6565
            gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
6566
          *expr_p = expr;
6567
          return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
6568
        default:
6569
          break;
6570
        }
6571
      break;
6572
    default:
6573
      break;
6574
    }
6575
 
6576
  if (saw_lhs == 0)
6577
    {
6578
      enum gimplify_status gs;
6579
      gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
6580
      if (gs != GS_ALL_DONE)
6581
        saw_lhs = -1;
6582
    }
6583
 
6584
  return saw_lhs;
6585
}
6586
 
6587
/* Gimplify an OMP_ATOMIC statement.  */
6588
 
6589
static enum gimplify_status
6590
gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
6591
{
6592
  tree addr = TREE_OPERAND (*expr_p, 0);
6593
  tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
6594
             ? NULL : TREE_OPERAND (*expr_p, 1);
6595
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
6596
  tree tmp_load;
6597
  gimple loadstmt, storestmt;
6598
 
6599
  tmp_load = create_tmp_reg (type, NULL);
6600
  if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
6601
    return GS_ERROR;
6602
 
6603
  if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
6604
      != GS_ALL_DONE)
6605
    return GS_ERROR;
6606
 
6607
  loadstmt = gimple_build_omp_atomic_load (tmp_load, addr);
6608
  gimplify_seq_add_stmt (pre_p, loadstmt);
6609
  if (rhs && gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
6610
      != GS_ALL_DONE)
6611
    return GS_ERROR;
6612
 
6613
  if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
6614
    rhs = tmp_load;
6615
  storestmt = gimple_build_omp_atomic_store (rhs);
6616
  gimplify_seq_add_stmt (pre_p, storestmt);
6617
  switch (TREE_CODE (*expr_p))
6618
    {
6619
    case OMP_ATOMIC_READ:
6620
    case OMP_ATOMIC_CAPTURE_OLD:
6621
      *expr_p = tmp_load;
6622
      gimple_omp_atomic_set_need_value (loadstmt);
6623
      break;
6624
    case OMP_ATOMIC_CAPTURE_NEW:
6625
      *expr_p = rhs;
6626
      gimple_omp_atomic_set_need_value (storestmt);
6627
      break;
6628
    default:
6629
      *expr_p = NULL;
6630
      break;
6631
    }
6632
 
6633
   return GS_ALL_DONE;
6634
}
6635
 
6636
/* Gimplify a TRANSACTION_EXPR.  This involves gimplification of the
6637
   body, and adding some EH bits.  */
6638
 
6639
static enum gimplify_status
6640
gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
6641
{
6642
  tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
6643
  gimple g;
6644
  gimple_seq body = NULL;
6645
  struct gimplify_ctx gctx;
6646
  int subcode = 0;
6647
 
6648
  /* Wrap the transaction body in a BIND_EXPR so we have a context
6649
     where to put decls for OpenMP.  */
6650
  if (TREE_CODE (tbody) != BIND_EXPR)
6651
    {
6652
      tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
6653
      TREE_SIDE_EFFECTS (bind) = 1;
6654
      SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
6655
      TRANSACTION_EXPR_BODY (expr) = bind;
6656
    }
6657
 
6658
  push_gimplify_context (&gctx);
6659
  temp = voidify_wrapper_expr (*expr_p, NULL);
6660
 
6661
  g = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
6662
  pop_gimplify_context (g);
6663
 
6664
  g = gimple_build_transaction (body, NULL);
6665
  if (TRANSACTION_EXPR_OUTER (expr))
6666
    subcode = GTMA_IS_OUTER;
6667
  else if (TRANSACTION_EXPR_RELAXED (expr))
6668
    subcode = GTMA_IS_RELAXED;
6669
  gimple_transaction_set_subcode (g, subcode);
6670
 
6671
  gimplify_seq_add_stmt (pre_p, g);
6672
 
6673
  if (temp)
6674
    {
6675
      *expr_p = temp;
6676
      return GS_OK;
6677
    }
6678
 
6679
  *expr_p = NULL_TREE;
6680
  return GS_ALL_DONE;
6681
}
6682
 
6683
/* Convert the GENERIC expression tree *EXPR_P to GIMPLE.  If the
6684
   expression produces a value to be used as an operand inside a GIMPLE
6685
   statement, the value will be stored back in *EXPR_P.  This value will
6686
   be a tree of class tcc_declaration, tcc_constant, tcc_reference or
6687
   an SSA_NAME.  The corresponding sequence of GIMPLE statements is
6688
   emitted in PRE_P and POST_P.
6689
 
6690
   Additionally, this process may overwrite parts of the input
6691
   expression during gimplification.  Ideally, it should be
6692
   possible to do non-destructive gimplification.
6693
 
6694
   EXPR_P points to the GENERIC expression to convert to GIMPLE.  If
6695
      the expression needs to evaluate to a value to be used as
6696
      an operand in a GIMPLE statement, this value will be stored in
6697
      *EXPR_P on exit.  This happens when the caller specifies one
6698
      of fb_lvalue or fb_rvalue fallback flags.
6699
 
6700
   PRE_P will contain the sequence of GIMPLE statements corresponding
6701
       to the evaluation of EXPR and all the side-effects that must
6702
       be executed before the main expression.  On exit, the last
6703
       statement of PRE_P is the core statement being gimplified.  For
6704
       instance, when gimplifying 'if (++a)' the last statement in
6705
       PRE_P will be 'if (t.1)' where t.1 is the result of
6706
       pre-incrementing 'a'.
6707
 
6708
   POST_P will contain the sequence of GIMPLE statements corresponding
6709
       to the evaluation of all the side-effects that must be executed
6710
       after the main expression.  If this is NULL, the post
6711
       side-effects are stored at the end of PRE_P.
6712
 
6713
       The reason why the output is split in two is to handle post
6714
       side-effects explicitly.  In some cases, an expression may have
6715
       inner and outer post side-effects which need to be emitted in
6716
       an order different from the one given by the recursive
6717
       traversal.  For instance, for the expression (*p--)++ the post
6718
       side-effects of '--' must actually occur *after* the post
6719
       side-effects of '++'.  However, gimplification will first visit
6720
       the inner expression, so if a separate POST sequence was not
6721
       used, the resulting sequence would be:
6722
 
6723
            1   t.1 = *p
6724
            2   p = p - 1
6725
            3   t.2 = t.1 + 1
6726
            4   *p = t.2
6727
 
6728
       However, the post-decrement operation in line #2 must not be
6729
       evaluated until after the store to *p at line #4, so the
6730
       correct sequence should be:
6731
 
6732
            1   t.1 = *p
6733
            2   t.2 = t.1 + 1
6734
            3   *p = t.2
6735
            4   p = p - 1
6736
 
6737
       So, by specifying a separate post queue, it is possible
6738
       to emit the post side-effects in the correct order.
6739
       If POST_P is NULL, an internal queue will be used.  Before
6740
       returning to the caller, the sequence POST_P is appended to
6741
       the main output sequence PRE_P.
6742
 
6743
   GIMPLE_TEST_F points to a function that takes a tree T and
6744
       returns nonzero if T is in the GIMPLE form requested by the
6745
       caller.  The GIMPLE predicates are in gimple.c.
6746
 
6747
   FALLBACK tells the function what sort of a temporary we want if
6748
       gimplification cannot produce an expression that complies with
6749
       GIMPLE_TEST_F.
6750
 
6751
       fb_none means that no temporary should be generated
6752
       fb_rvalue means that an rvalue is OK to generate
6753
       fb_lvalue means that an lvalue is OK to generate
6754
       fb_either means that either is OK, but an lvalue is preferable.
6755
       fb_mayfail means that gimplification may fail (in which case
6756
       GS_ERROR will be returned)
6757
 
6758
   The return value is either GS_ERROR or GS_ALL_DONE, since this
6759
   function iterates until EXPR is completely gimplified or an error
6760
   occurs.  */
6761
 
6762
enum gimplify_status
6763
gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
6764
               bool (*gimple_test_f) (tree), fallback_t fallback)
6765
{
6766
  tree tmp;
6767
  gimple_seq internal_pre = NULL;
6768
  gimple_seq internal_post = NULL;
6769
  tree save_expr;
6770
  bool is_statement;
6771
  location_t saved_location;
6772
  enum gimplify_status ret;
6773
  gimple_stmt_iterator pre_last_gsi, post_last_gsi;
6774
 
6775
  save_expr = *expr_p;
6776
  if (save_expr == NULL_TREE)
6777
    return GS_ALL_DONE;
6778
 
6779
  /* If we are gimplifying a top-level statement, PRE_P must be valid.  */
6780
  is_statement = gimple_test_f == is_gimple_stmt;
6781
  if (is_statement)
6782
    gcc_assert (pre_p);
6783
 
6784
  /* Consistency checks.  */
6785
  if (gimple_test_f == is_gimple_reg)
6786
    gcc_assert (fallback & (fb_rvalue | fb_lvalue));
6787
  else if (gimple_test_f == is_gimple_val
6788
           || gimple_test_f == is_gimple_call_addr
6789
           || gimple_test_f == is_gimple_condexpr
6790
           || gimple_test_f == is_gimple_mem_rhs
6791
           || gimple_test_f == is_gimple_mem_rhs_or_call
6792
           || gimple_test_f == is_gimple_reg_rhs
6793
           || gimple_test_f == is_gimple_reg_rhs_or_call
6794
           || gimple_test_f == is_gimple_asm_val
6795
           || gimple_test_f == is_gimple_mem_ref_addr)
6796
    gcc_assert (fallback & fb_rvalue);
6797
  else if (gimple_test_f == is_gimple_min_lval
6798
           || gimple_test_f == is_gimple_lvalue)
6799
    gcc_assert (fallback & fb_lvalue);
6800
  else if (gimple_test_f == is_gimple_addressable)
6801
    gcc_assert (fallback & fb_either);
6802
  else if (gimple_test_f == is_gimple_stmt)
6803
    gcc_assert (fallback == fb_none);
6804
  else
6805
    {
6806
      /* We should have recognized the GIMPLE_TEST_F predicate to
6807
         know what kind of fallback to use in case a temporary is
6808
         needed to hold the value or address of *EXPR_P.  */
6809
      gcc_unreachable ();
6810
    }
6811
 
6812
  /* We used to check the predicate here and return immediately if it
6813
     succeeds.  This is wrong; the design is for gimplification to be
6814
     idempotent, and for the predicates to only test for valid forms, not
6815
     whether they are fully simplified.  */
6816
  if (pre_p == NULL)
6817
    pre_p = &internal_pre;
6818
 
6819
  if (post_p == NULL)
6820
    post_p = &internal_post;
6821
 
6822
  /* Remember the last statements added to PRE_P and POST_P.  Every
6823
     new statement added by the gimplification helpers needs to be
6824
     annotated with location information.  To centralize the
6825
     responsibility, we remember the last statement that had been
6826
     added to both queues before gimplifying *EXPR_P.  If
6827
     gimplification produces new statements in PRE_P and POST_P, those
6828
     statements will be annotated with the same location information
6829
     as *EXPR_P.  */
6830
  pre_last_gsi = gsi_last (*pre_p);
6831
  post_last_gsi = gsi_last (*post_p);
6832
 
6833
  saved_location = input_location;
6834
  if (save_expr != error_mark_node
6835
      && EXPR_HAS_LOCATION (*expr_p))
6836
    input_location = EXPR_LOCATION (*expr_p);
6837
 
6838
  /* Loop over the specific gimplifiers until the toplevel node
6839
     remains the same.  */
6840
  do
6841
    {
6842
      /* Strip away as many useless type conversions as possible
6843
         at the toplevel.  */
6844
      STRIP_USELESS_TYPE_CONVERSION (*expr_p);
6845
 
6846
      /* Remember the expr.  */
6847
      save_expr = *expr_p;
6848
 
6849
      /* Die, die, die, my darling.  */
6850
      if (save_expr == error_mark_node
6851
          || (TREE_TYPE (save_expr)
6852
              && TREE_TYPE (save_expr) == error_mark_node))
6853
        {
6854
          ret = GS_ERROR;
6855
          break;
6856
        }
6857
 
6858
      /* Do any language-specific gimplification.  */
6859
      ret = ((enum gimplify_status)
6860
             lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
6861
      if (ret == GS_OK)
6862
        {
6863
          if (*expr_p == NULL_TREE)
6864
            break;
6865
          if (*expr_p != save_expr)
6866
            continue;
6867
        }
6868
      else if (ret != GS_UNHANDLED)
6869
        break;
6870
 
6871
      /* Make sure that all the cases set 'ret' appropriately.  */
6872
      ret = GS_UNHANDLED;
6873
      switch (TREE_CODE (*expr_p))
6874
        {
6875
          /* First deal with the special cases.  */
6876
 
6877
        case POSTINCREMENT_EXPR:
6878
        case POSTDECREMENT_EXPR:
6879
        case PREINCREMENT_EXPR:
6880
        case PREDECREMENT_EXPR:
6881
          ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
6882
                                        fallback != fb_none);
6883
          break;
6884
 
6885
        case ARRAY_REF:
6886
        case ARRAY_RANGE_REF:
6887
        case REALPART_EXPR:
6888
        case IMAGPART_EXPR:
6889
        case COMPONENT_REF:
6890
        case VIEW_CONVERT_EXPR:
6891
          ret = gimplify_compound_lval (expr_p, pre_p, post_p,
6892
                                        fallback ? fallback : fb_rvalue);
6893
          break;
6894
 
6895
        case COND_EXPR:
6896
          ret = gimplify_cond_expr (expr_p, pre_p, fallback);
6897
 
6898
          /* C99 code may assign to an array in a structure value of a
6899
             conditional expression, and this has undefined behavior
6900
             only on execution, so create a temporary if an lvalue is
6901
             required.  */
6902
          if (fallback == fb_lvalue)
6903
            {
6904
              *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
6905
              mark_addressable (*expr_p);
6906
              ret = GS_OK;
6907
            }
6908
          break;
6909
 
6910
        case CALL_EXPR:
6911
          ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
6912
 
6913
          /* C99 code may assign to an array in a structure returned
6914
             from a function, and this has undefined behavior only on
6915
             execution, so create a temporary if an lvalue is
6916
             required.  */
6917
          if (fallback == fb_lvalue)
6918
            {
6919
              *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
6920
              mark_addressable (*expr_p);
6921
              ret = GS_OK;
6922
            }
6923
          break;
6924
 
6925
        case TREE_LIST:
6926
          gcc_unreachable ();
6927
 
6928
        case COMPOUND_EXPR:
6929
          ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
6930
          break;
6931
 
6932
        case COMPOUND_LITERAL_EXPR:
6933
          ret = gimplify_compound_literal_expr (expr_p, pre_p);
6934
          break;
6935
 
6936
        case MODIFY_EXPR:
6937
        case INIT_EXPR:
6938
          ret = gimplify_modify_expr (expr_p, pre_p, post_p,
6939
                                      fallback != fb_none);
6940
          break;
6941
 
6942
        case TRUTH_ANDIF_EXPR:
6943
        case TRUTH_ORIF_EXPR:
6944
          {
6945
            /* Preserve the original type of the expression and the
6946
               source location of the outer expression.  */
6947
            tree org_type = TREE_TYPE (*expr_p);
6948
            *expr_p = gimple_boolify (*expr_p);
6949
            *expr_p = build3_loc (input_location, COND_EXPR,
6950
                                  org_type, *expr_p,
6951
                                  fold_convert_loc
6952
                                    (input_location,
6953
                                     org_type, boolean_true_node),
6954
                                  fold_convert_loc
6955
                                    (input_location,
6956
                                     org_type, boolean_false_node));
6957
            ret = GS_OK;
6958
            break;
6959
          }
6960
 
6961
        case TRUTH_NOT_EXPR:
6962
          {
6963
            tree type = TREE_TYPE (*expr_p);
6964
            /* The parsers are careful to generate TRUTH_NOT_EXPR
6965
               only with operands that are always zero or one.
6966
               We do not fold here but handle the only interesting case
6967
               manually, as fold may re-introduce the TRUTH_NOT_EXPR.  */
6968
            *expr_p = gimple_boolify (*expr_p);
6969
            if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
6970
              *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
6971
                                    TREE_TYPE (*expr_p),
6972
                                    TREE_OPERAND (*expr_p, 0));
6973
            else
6974
              *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
6975
                                    TREE_TYPE (*expr_p),
6976
                                    TREE_OPERAND (*expr_p, 0),
6977
                                    build_int_cst (TREE_TYPE (*expr_p), 1));
6978
            if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
6979
              *expr_p = fold_convert_loc (input_location, type, *expr_p);
6980
            ret = GS_OK;
6981
            break;
6982
          }
6983
 
6984
        case ADDR_EXPR:
6985
          ret = gimplify_addr_expr (expr_p, pre_p, post_p);
6986
          break;
6987
 
6988
        case VA_ARG_EXPR:
6989
          ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
6990
          break;
6991
 
6992
        CASE_CONVERT:
6993
          if (IS_EMPTY_STMT (*expr_p))
6994
            {
6995
              ret = GS_ALL_DONE;
6996
              break;
6997
            }
6998
 
6999
          if (VOID_TYPE_P (TREE_TYPE (*expr_p))
7000
              || fallback == fb_none)
7001
            {
7002
              /* Just strip a conversion to void (or in void context) and
7003
                 try again.  */
7004
              *expr_p = TREE_OPERAND (*expr_p, 0);
7005
              ret = GS_OK;
7006
              break;
7007
            }
7008
 
7009
          ret = gimplify_conversion (expr_p);
7010
          if (ret == GS_ERROR)
7011
            break;
7012
          if (*expr_p != save_expr)
7013
            break;
7014
          /* FALLTHRU */
7015
 
7016
        case FIX_TRUNC_EXPR:
7017
          /* unary_expr: ... | '(' cast ')' val | ...  */
7018
          ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7019
                               is_gimple_val, fb_rvalue);
7020
          recalculate_side_effects (*expr_p);
7021
          break;
7022
 
7023
        case INDIRECT_REF:
7024
          {
7025
            bool volatilep = TREE_THIS_VOLATILE (*expr_p);
7026
            bool notrap = TREE_THIS_NOTRAP (*expr_p);
7027
            tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
7028
 
7029
            *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
7030
            if (*expr_p != save_expr)
7031
              {
7032
                ret = GS_OK;
7033
                break;
7034
              }
7035
 
7036
            ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7037
                                 is_gimple_reg, fb_rvalue);
7038
            if (ret == GS_ERROR)
7039
              break;
7040
 
7041
            recalculate_side_effects (*expr_p);
7042
            *expr_p = fold_build2_loc (input_location, MEM_REF,
7043
                                       TREE_TYPE (*expr_p),
7044
                                       TREE_OPERAND (*expr_p, 0),
7045
                                       build_int_cst (saved_ptr_type, 0));
7046
            TREE_THIS_VOLATILE (*expr_p) = volatilep;
7047
            TREE_THIS_NOTRAP (*expr_p) = notrap;
7048
            ret = GS_OK;
7049
            break;
7050
          }
7051
 
7052
        /* We arrive here through the various re-gimplifcation paths.  */
7053
        case MEM_REF:
7054
          /* First try re-folding the whole thing.  */
7055
          tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
7056
                             TREE_OPERAND (*expr_p, 0),
7057
                             TREE_OPERAND (*expr_p, 1));
7058
          if (tmp)
7059
            {
7060
              *expr_p = tmp;
7061
              recalculate_side_effects (*expr_p);
7062
              ret = GS_OK;
7063
              break;
7064
            }
7065
          /* Avoid re-gimplifying the address operand if it is already
7066
             in suitable form.  Re-gimplifying would mark the address
7067
             operand addressable.  Always gimplify when not in SSA form
7068
             as we still may have to gimplify decls with value-exprs.  */
7069
          if (!gimplify_ctxp || !gimplify_ctxp->into_ssa
7070
              || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
7071
            {
7072
              ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7073
                                   is_gimple_mem_ref_addr, fb_rvalue);
7074
              if (ret == GS_ERROR)
7075
                break;
7076
            }
7077
          recalculate_side_effects (*expr_p);
7078
          ret = GS_ALL_DONE;
7079
          break;
7080
 
7081
        /* Constants need not be gimplified.  */
7082
        case INTEGER_CST:
7083
        case REAL_CST:
7084
        case FIXED_CST:
7085
        case STRING_CST:
7086
        case COMPLEX_CST:
7087
        case VECTOR_CST:
7088
          ret = GS_ALL_DONE;
7089
          break;
7090
 
7091
        case CONST_DECL:
7092
          /* If we require an lvalue, such as for ADDR_EXPR, retain the
7093
             CONST_DECL node.  Otherwise the decl is replaceable by its
7094
             value.  */
7095
          /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either.  */
7096
          if (fallback & fb_lvalue)
7097
            ret = GS_ALL_DONE;
7098
          else
7099
            {
7100
              *expr_p = DECL_INITIAL (*expr_p);
7101
              ret = GS_OK;
7102
            }
7103
          break;
7104
 
7105
        case DECL_EXPR:
7106
          ret = gimplify_decl_expr (expr_p, pre_p);
7107
          break;
7108
 
7109
        case BIND_EXPR:
7110
          ret = gimplify_bind_expr (expr_p, pre_p);
7111
          break;
7112
 
7113
        case LOOP_EXPR:
7114
          ret = gimplify_loop_expr (expr_p, pre_p);
7115
          break;
7116
 
7117
        case SWITCH_EXPR:
7118
          ret = gimplify_switch_expr (expr_p, pre_p);
7119
          break;
7120
 
7121
        case EXIT_EXPR:
7122
          ret = gimplify_exit_expr (expr_p);
7123
          break;
7124
 
7125
        case GOTO_EXPR:
7126
          /* If the target is not LABEL, then it is a computed jump
7127
             and the target needs to be gimplified.  */
7128
          if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
7129
            {
7130
              ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
7131
                                   NULL, is_gimple_val, fb_rvalue);
7132
              if (ret == GS_ERROR)
7133
                break;
7134
            }
7135
          gimplify_seq_add_stmt (pre_p,
7136
                          gimple_build_goto (GOTO_DESTINATION (*expr_p)));
7137
          ret = GS_ALL_DONE;
7138
          break;
7139
 
7140
        case PREDICT_EXPR:
7141
          gimplify_seq_add_stmt (pre_p,
7142
                        gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
7143
                                              PREDICT_EXPR_OUTCOME (*expr_p)));
7144
          ret = GS_ALL_DONE;
7145
          break;
7146
 
7147
        case LABEL_EXPR:
7148
          ret = GS_ALL_DONE;
7149
          gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
7150
                      == current_function_decl);
7151
          gimplify_seq_add_stmt (pre_p,
7152
                          gimple_build_label (LABEL_EXPR_LABEL (*expr_p)));
7153
          break;
7154
 
7155
        case CASE_LABEL_EXPR:
7156
          ret = gimplify_case_label_expr (expr_p, pre_p);
7157
          break;
7158
 
7159
        case RETURN_EXPR:
7160
          ret = gimplify_return_expr (*expr_p, pre_p);
7161
          break;
7162
 
7163
        case CONSTRUCTOR:
7164
          /* Don't reduce this in place; let gimplify_init_constructor work its
7165
             magic.  Buf if we're just elaborating this for side effects, just
7166
             gimplify any element that has side-effects.  */
7167
          if (fallback == fb_none)
7168
            {
7169
              unsigned HOST_WIDE_INT ix;
7170
              tree val;
7171
              tree temp = NULL_TREE;
7172
              FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
7173
                if (TREE_SIDE_EFFECTS (val))
7174
                  append_to_statement_list (val, &temp);
7175
 
7176
              *expr_p = temp;
7177
              ret = temp ? GS_OK : GS_ALL_DONE;
7178
            }
7179
          /* C99 code may assign to an array in a constructed
7180
             structure or union, and this has undefined behavior only
7181
             on execution, so create a temporary if an lvalue is
7182
             required.  */
7183
          else if (fallback == fb_lvalue)
7184
            {
7185
              *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
7186
              mark_addressable (*expr_p);
7187
              ret = GS_OK;
7188
            }
7189
          else
7190
            ret = GS_ALL_DONE;
7191
          break;
7192
 
7193
          /* The following are special cases that are not handled by the
7194
             original GIMPLE grammar.  */
7195
 
7196
          /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
7197
             eliminated.  */
7198
        case SAVE_EXPR:
7199
          ret = gimplify_save_expr (expr_p, pre_p, post_p);
7200
          break;
7201
 
7202
        case BIT_FIELD_REF:
7203
          {
7204
            enum gimplify_status r0, r1, r2;
7205
 
7206
            r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7207
                                post_p, is_gimple_lvalue, fb_either);
7208
            r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
7209
                                post_p, is_gimple_val, fb_rvalue);
7210
            r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
7211
                                post_p, is_gimple_val, fb_rvalue);
7212
            recalculate_side_effects (*expr_p);
7213
 
7214
            ret = MIN (r0, MIN (r1, r2));
7215
          }
7216
          break;
7217
 
7218
        case TARGET_MEM_REF:
7219
          {
7220
            enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
7221
 
7222
            if (TMR_BASE (*expr_p))
7223
              r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
7224
                                  post_p, is_gimple_mem_ref_addr, fb_either);
7225
            if (TMR_INDEX (*expr_p))
7226
              r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
7227
                                  post_p, is_gimple_val, fb_rvalue);
7228
            if (TMR_INDEX2 (*expr_p))
7229
              r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
7230
                                  post_p, is_gimple_val, fb_rvalue);
7231
            /* TMR_STEP and TMR_OFFSET are always integer constants.  */
7232
            ret = MIN (r0, r1);
7233
          }
7234
          break;
7235
 
7236
        case NON_LVALUE_EXPR:
7237
          /* This should have been stripped above.  */
7238
          gcc_unreachable ();
7239
 
7240
        case ASM_EXPR:
7241
          ret = gimplify_asm_expr (expr_p, pre_p, post_p);
7242
          break;
7243
 
7244
        case TRY_FINALLY_EXPR:
7245
        case TRY_CATCH_EXPR:
7246
          {
7247
            gimple_seq eval, cleanup;
7248
            gimple try_;
7249
 
7250
            eval = cleanup = NULL;
7251
            gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
7252
            gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
7253
            /* Don't create bogus GIMPLE_TRY with empty cleanup.  */
7254
            if (gimple_seq_empty_p (cleanup))
7255
              {
7256
                gimple_seq_add_seq (pre_p, eval);
7257
                ret = GS_ALL_DONE;
7258
                break;
7259
              }
7260
            try_ = gimple_build_try (eval, cleanup,
7261
                                     TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
7262
                                     ? GIMPLE_TRY_FINALLY
7263
                                     : GIMPLE_TRY_CATCH);
7264
            if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
7265
              gimple_try_set_catch_is_cleanup (try_,
7266
                                               TRY_CATCH_IS_CLEANUP (*expr_p));
7267
            gimplify_seq_add_stmt (pre_p, try_);
7268
            ret = GS_ALL_DONE;
7269
            break;
7270
          }
7271
 
7272
        case CLEANUP_POINT_EXPR:
7273
          ret = gimplify_cleanup_point_expr (expr_p, pre_p);
7274
          break;
7275
 
7276
        case TARGET_EXPR:
7277
          ret = gimplify_target_expr (expr_p, pre_p, post_p);
7278
          break;
7279
 
7280
        case CATCH_EXPR:
7281
          {
7282
            gimple c;
7283
            gimple_seq handler = NULL;
7284
            gimplify_and_add (CATCH_BODY (*expr_p), &handler);
7285
            c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
7286
            gimplify_seq_add_stmt (pre_p, c);
7287
            ret = GS_ALL_DONE;
7288
            break;
7289
          }
7290
 
7291
        case EH_FILTER_EXPR:
7292
          {
7293
            gimple ehf;
7294
            gimple_seq failure = NULL;
7295
 
7296
            gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
7297
            ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
7298
            gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
7299
            gimplify_seq_add_stmt (pre_p, ehf);
7300
            ret = GS_ALL_DONE;
7301
            break;
7302
          }
7303
 
7304
        case OBJ_TYPE_REF:
7305
          {
7306
            enum gimplify_status r0, r1;
7307
            r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
7308
                                post_p, is_gimple_val, fb_rvalue);
7309
            r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
7310
                                post_p, is_gimple_val, fb_rvalue);
7311
            TREE_SIDE_EFFECTS (*expr_p) = 0;
7312
            ret = MIN (r0, r1);
7313
          }
7314
          break;
7315
 
7316
        case LABEL_DECL:
7317
          /* We get here when taking the address of a label.  We mark
7318
             the label as "forced"; meaning it can never be removed and
7319
             it is a potential target for any computed goto.  */
7320
          FORCED_LABEL (*expr_p) = 1;
7321
          ret = GS_ALL_DONE;
7322
          break;
7323
 
7324
        case STATEMENT_LIST:
7325
          ret = gimplify_statement_list (expr_p, pre_p);
7326
          break;
7327
 
7328
        case WITH_SIZE_EXPR:
7329
          {
7330
            gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7331
                           post_p == &internal_post ? NULL : post_p,
7332
                           gimple_test_f, fallback);
7333
            gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
7334
                           is_gimple_val, fb_rvalue);
7335
            ret = GS_ALL_DONE;
7336
          }
7337
          break;
7338
 
7339
        case VAR_DECL:
7340
        case PARM_DECL:
7341
          ret = gimplify_var_or_parm_decl (expr_p);
7342
          break;
7343
 
7344
        case RESULT_DECL:
7345
          /* When within an OpenMP context, notice uses of variables.  */
7346
          if (gimplify_omp_ctxp)
7347
            omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
7348
          ret = GS_ALL_DONE;
7349
          break;
7350
 
7351
        case SSA_NAME:
7352
          /* Allow callbacks into the gimplifier during optimization.  */
7353
          ret = GS_ALL_DONE;
7354
          break;
7355
 
7356
        case OMP_PARALLEL:
7357
          gimplify_omp_parallel (expr_p, pre_p);
7358
          ret = GS_ALL_DONE;
7359
          break;
7360
 
7361
        case OMP_TASK:
7362
          gimplify_omp_task (expr_p, pre_p);
7363
          ret = GS_ALL_DONE;
7364
          break;
7365
 
7366
        case OMP_FOR:
7367
          ret = gimplify_omp_for (expr_p, pre_p);
7368
          break;
7369
 
7370
        case OMP_SECTIONS:
7371
        case OMP_SINGLE:
7372
          gimplify_omp_workshare (expr_p, pre_p);
7373
          ret = GS_ALL_DONE;
7374
          break;
7375
 
7376
        case OMP_SECTION:
7377
        case OMP_MASTER:
7378
        case OMP_ORDERED:
7379
        case OMP_CRITICAL:
7380
          {
7381
            gimple_seq body = NULL;
7382
            gimple g;
7383
 
7384
            gimplify_and_add (OMP_BODY (*expr_p), &body);
7385
            switch (TREE_CODE (*expr_p))
7386
              {
7387
              case OMP_SECTION:
7388
                g = gimple_build_omp_section (body);
7389
                break;
7390
              case OMP_MASTER:
7391
                g = gimple_build_omp_master (body);
7392
                break;
7393
              case OMP_ORDERED:
7394
                g = gimple_build_omp_ordered (body);
7395
                break;
7396
              case OMP_CRITICAL:
7397
                g = gimple_build_omp_critical (body,
7398
                                               OMP_CRITICAL_NAME (*expr_p));
7399
                break;
7400
              default:
7401
                gcc_unreachable ();
7402
              }
7403
            gimplify_seq_add_stmt (pre_p, g);
7404
            ret = GS_ALL_DONE;
7405
            break;
7406
          }
7407
 
7408
        case OMP_ATOMIC:
7409
        case OMP_ATOMIC_READ:
7410
        case OMP_ATOMIC_CAPTURE_OLD:
7411
        case OMP_ATOMIC_CAPTURE_NEW:
7412
          ret = gimplify_omp_atomic (expr_p, pre_p);
7413
          break;
7414
 
7415
        case TRANSACTION_EXPR:
7416
          ret = gimplify_transaction (expr_p, pre_p);
7417
          break;
7418
 
7419
        case TRUTH_AND_EXPR:
7420
        case TRUTH_OR_EXPR:
7421
        case TRUTH_XOR_EXPR:
7422
          {
7423
            tree orig_type = TREE_TYPE (*expr_p);
7424
            tree new_type, xop0, xop1;
7425
            *expr_p = gimple_boolify (*expr_p);
7426
            new_type = TREE_TYPE (*expr_p);
7427
            if (!useless_type_conversion_p (orig_type, new_type))
7428
              {
7429
                *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
7430
                ret = GS_OK;
7431
                break;
7432
              }
7433
 
7434
          /* Boolified binary truth expressions are semantically equivalent
7435
             to bitwise binary expressions.  Canonicalize them to the
7436
             bitwise variant.  */
7437
            switch (TREE_CODE (*expr_p))
7438
              {
7439
              case TRUTH_AND_EXPR:
7440
                TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
7441
                break;
7442
              case TRUTH_OR_EXPR:
7443
                TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
7444
                break;
7445
              case TRUTH_XOR_EXPR:
7446
                TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
7447
                break;
7448
              default:
7449
                break;
7450
              }
7451
            /* Now make sure that operands have compatible type to
7452
               expression's new_type.  */
7453
            xop0 = TREE_OPERAND (*expr_p, 0);
7454
            xop1 = TREE_OPERAND (*expr_p, 1);
7455
            if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
7456
              TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
7457
                                                            new_type,
7458
                                                            xop0);
7459
            if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
7460
              TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
7461
                                                            new_type,
7462
                                                            xop1);
7463
            /* Continue classified as tcc_binary.  */
7464
            goto expr_2;
7465
          }
7466
 
7467
        case FMA_EXPR:
7468
        case VEC_PERM_EXPR:
7469
          /* Classified as tcc_expression.  */
7470
          goto expr_3;
7471
 
7472
        case POINTER_PLUS_EXPR:
7473
          {
7474
            enum gimplify_status r0, r1;
7475
            r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7476
                                post_p, is_gimple_val, fb_rvalue);
7477
            r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
7478
                                post_p, is_gimple_val, fb_rvalue);
7479
            recalculate_side_effects (*expr_p);
7480
            ret = MIN (r0, r1);
7481
            /* Convert &X + CST to invariant &MEM[&X, CST].  Do this
7482
               after gimplifying operands - this is similar to how
7483
               it would be folding all gimplified stmts on creation
7484
               to have them canonicalized, which is what we eventually
7485
               should do anyway.  */
7486
            if (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
7487
                && is_gimple_min_invariant (TREE_OPERAND (*expr_p, 0)))
7488
              {
7489
                *expr_p = build_fold_addr_expr_with_type_loc
7490
                   (input_location,
7491
                    fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (*expr_p)),
7492
                                 TREE_OPERAND (*expr_p, 0),
7493
                                 fold_convert (ptr_type_node,
7494
                                               TREE_OPERAND (*expr_p, 1))),
7495
                    TREE_TYPE (*expr_p));
7496
                ret = MIN (ret, GS_OK);
7497
              }
7498
            break;
7499
          }
7500
 
7501
        default:
7502
          switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
7503
            {
7504
            case tcc_comparison:
7505
              /* Handle comparison of objects of non scalar mode aggregates
7506
                 with a call to memcmp.  It would be nice to only have to do
7507
                 this for variable-sized objects, but then we'd have to allow
7508
                 the same nest of reference nodes we allow for MODIFY_EXPR and
7509
                 that's too complex.
7510
 
7511
                 Compare scalar mode aggregates as scalar mode values.  Using
7512
                 memcmp for them would be very inefficient at best, and is
7513
                 plain wrong if bitfields are involved.  */
7514
                {
7515
                  tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
7516
 
7517
                  /* Vector comparisons need no boolification.  */
7518
                  if (TREE_CODE (type) == VECTOR_TYPE)
7519
                    goto expr_2;
7520
                  else if (!AGGREGATE_TYPE_P (type))
7521
                    {
7522
                      tree org_type = TREE_TYPE (*expr_p);
7523
                      *expr_p = gimple_boolify (*expr_p);
7524
                      if (!useless_type_conversion_p (org_type,
7525
                                                      TREE_TYPE (*expr_p)))
7526
                        {
7527
                          *expr_p = fold_convert_loc (input_location,
7528
                                                      org_type, *expr_p);
7529
                          ret = GS_OK;
7530
                        }
7531
                      else
7532
                        goto expr_2;
7533
                    }
7534
                  else if (TYPE_MODE (type) != BLKmode)
7535
                    ret = gimplify_scalar_mode_aggregate_compare (expr_p);
7536
                  else
7537
                    ret = gimplify_variable_sized_compare (expr_p);
7538
 
7539
                  break;
7540
                }
7541
 
7542
            /* If *EXPR_P does not need to be special-cased, handle it
7543
               according to its class.  */
7544
            case tcc_unary:
7545
              ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7546
                                   post_p, is_gimple_val, fb_rvalue);
7547
              break;
7548
 
7549
            case tcc_binary:
7550
            expr_2:
7551
              {
7552
                enum gimplify_status r0, r1;
7553
 
7554
                r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7555
                                    post_p, is_gimple_val, fb_rvalue);
7556
                r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
7557
                                    post_p, is_gimple_val, fb_rvalue);
7558
 
7559
                ret = MIN (r0, r1);
7560
                break;
7561
              }
7562
 
7563
            expr_3:
7564
              {
7565
                enum gimplify_status r0, r1, r2;
7566
 
7567
                r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7568
                                    post_p, is_gimple_val, fb_rvalue);
7569
                r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
7570
                                    post_p, is_gimple_val, fb_rvalue);
7571
                r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
7572
                                    post_p, is_gimple_val, fb_rvalue);
7573
 
7574
                ret = MIN (MIN (r0, r1), r2);
7575
                break;
7576
              }
7577
 
7578
            case tcc_declaration:
7579
            case tcc_constant:
7580
              ret = GS_ALL_DONE;
7581
              goto dont_recalculate;
7582
 
7583
            default:
7584
              gcc_unreachable ();
7585
            }
7586
 
7587
          recalculate_side_effects (*expr_p);
7588
 
7589
        dont_recalculate:
7590
          break;
7591
        }
7592
 
7593
      gcc_assert (*expr_p || ret != GS_OK);
7594
    }
7595
  while (ret == GS_OK);
7596
 
7597
  /* If we encountered an error_mark somewhere nested inside, either
7598
     stub out the statement or propagate the error back out.  */
7599
  if (ret == GS_ERROR)
7600
    {
7601
      if (is_statement)
7602
        *expr_p = NULL;
7603
      goto out;
7604
    }
7605
 
7606
  /* This was only valid as a return value from the langhook, which
7607
     we handled.  Make sure it doesn't escape from any other context.  */
7608
  gcc_assert (ret != GS_UNHANDLED);
7609
 
7610
  if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
7611
    {
7612
      /* We aren't looking for a value, and we don't have a valid
7613
         statement.  If it doesn't have side-effects, throw it away.  */
7614
      if (!TREE_SIDE_EFFECTS (*expr_p))
7615
        *expr_p = NULL;
7616
      else if (!TREE_THIS_VOLATILE (*expr_p))
7617
        {
7618
          /* This is probably a _REF that contains something nested that
7619
             has side effects.  Recurse through the operands to find it.  */
7620
          enum tree_code code = TREE_CODE (*expr_p);
7621
 
7622
          switch (code)
7623
            {
7624
            case COMPONENT_REF:
7625
            case REALPART_EXPR:
7626
            case IMAGPART_EXPR:
7627
            case VIEW_CONVERT_EXPR:
7628
              gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7629
                             gimple_test_f, fallback);
7630
              break;
7631
 
7632
            case ARRAY_REF:
7633
            case ARRAY_RANGE_REF:
7634
              gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7635
                             gimple_test_f, fallback);
7636
              gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
7637
                             gimple_test_f, fallback);
7638
              break;
7639
 
7640
            default:
7641
               /* Anything else with side-effects must be converted to
7642
                  a valid statement before we get here.  */
7643
              gcc_unreachable ();
7644
            }
7645
 
7646
          *expr_p = NULL;
7647
        }
7648
      else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
7649
               && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
7650
        {
7651
          /* Historically, the compiler has treated a bare reference
7652
             to a non-BLKmode volatile lvalue as forcing a load.  */
7653
          tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
7654
 
7655
          /* Normally, we do not want to create a temporary for a
7656
             TREE_ADDRESSABLE type because such a type should not be
7657
             copied by bitwise-assignment.  However, we make an
7658
             exception here, as all we are doing here is ensuring that
7659
             we read the bytes that make up the type.  We use
7660
             create_tmp_var_raw because create_tmp_var will abort when
7661
             given a TREE_ADDRESSABLE type.  */
7662
          tree tmp = create_tmp_var_raw (type, "vol");
7663
          gimple_add_tmp_var (tmp);
7664
          gimplify_assign (tmp, *expr_p, pre_p);
7665
          *expr_p = NULL;
7666
        }
7667
      else
7668
        /* We can't do anything useful with a volatile reference to
7669
           an incomplete type, so just throw it away.  Likewise for
7670
           a BLKmode type, since any implicit inner load should
7671
           already have been turned into an explicit one by the
7672
           gimplification process.  */
7673
        *expr_p = NULL;
7674
    }
7675
 
7676
  /* If we are gimplifying at the statement level, we're done.  Tack
7677
     everything together and return.  */
7678
  if (fallback == fb_none || is_statement)
7679
    {
7680
      /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
7681
         it out for GC to reclaim it.  */
7682
      *expr_p = NULL_TREE;
7683
 
7684
      if (!gimple_seq_empty_p (internal_pre)
7685
          || !gimple_seq_empty_p (internal_post))
7686
        {
7687
          gimplify_seq_add_seq (&internal_pre, internal_post);
7688
          gimplify_seq_add_seq (pre_p, internal_pre);
7689
        }
7690
 
7691
      /* The result of gimplifying *EXPR_P is going to be the last few
7692
         statements in *PRE_P and *POST_P.  Add location information
7693
         to all the statements that were added by the gimplification
7694
         helpers.  */
7695
      if (!gimple_seq_empty_p (*pre_p))
7696
        annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
7697
 
7698
      if (!gimple_seq_empty_p (*post_p))
7699
        annotate_all_with_location_after (*post_p, post_last_gsi,
7700
                                          input_location);
7701
 
7702
      goto out;
7703
    }
7704
 
7705
#ifdef ENABLE_GIMPLE_CHECKING
7706
  if (*expr_p)
7707
    {
7708
      enum tree_code code = TREE_CODE (*expr_p);
7709
      /* These expressions should already be in gimple IR form.  */
7710
      gcc_assert (code != MODIFY_EXPR
7711
                  && code != ASM_EXPR
7712
                  && code != BIND_EXPR
7713
                  && code != CATCH_EXPR
7714
                  && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
7715
                  && code != EH_FILTER_EXPR
7716
                  && code != GOTO_EXPR
7717
                  && code != LABEL_EXPR
7718
                  && code != LOOP_EXPR
7719
                  && code != SWITCH_EXPR
7720
                  && code != TRY_FINALLY_EXPR
7721
                  && code != OMP_CRITICAL
7722
                  && code != OMP_FOR
7723
                  && code != OMP_MASTER
7724
                  && code != OMP_ORDERED
7725
                  && code != OMP_PARALLEL
7726
                  && code != OMP_SECTIONS
7727
                  && code != OMP_SECTION
7728
                  && code != OMP_SINGLE);
7729
    }
7730
#endif
7731
 
7732
  /* Otherwise we're gimplifying a subexpression, so the resulting
7733
     value is interesting.  If it's a valid operand that matches
7734
     GIMPLE_TEST_F, we're done. Unless we are handling some
7735
     post-effects internally; if that's the case, we need to copy into
7736
     a temporary before adding the post-effects to POST_P.  */
7737
  if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
7738
    goto out;
7739
 
7740
  /* Otherwise, we need to create a new temporary for the gimplified
7741
     expression.  */
7742
 
7743
  /* We can't return an lvalue if we have an internal postqueue.  The
7744
     object the lvalue refers to would (probably) be modified by the
7745
     postqueue; we need to copy the value out first, which means an
7746
     rvalue.  */
7747
  if ((fallback & fb_lvalue)
7748
      && gimple_seq_empty_p (internal_post)
7749
      && is_gimple_addressable (*expr_p))
7750
    {
7751
      /* An lvalue will do.  Take the address of the expression, store it
7752
         in a temporary, and replace the expression with an INDIRECT_REF of
7753
         that temporary.  */
7754
      tmp = build_fold_addr_expr_loc (input_location, *expr_p);
7755
      gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
7756
      *expr_p = build_simple_mem_ref (tmp);
7757
    }
7758
  else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
7759
    {
7760
      /* An rvalue will do.  Assign the gimplified expression into a
7761
         new temporary TMP and replace the original expression with
7762
         TMP.  First, make sure that the expression has a type so that
7763
         it can be assigned into a temporary.  */
7764
      gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
7765
 
7766
      if (!gimple_seq_empty_p (internal_post) || (fallback & fb_lvalue))
7767
        /* The postqueue might change the value of the expression between
7768
           the initialization and use of the temporary, so we can't use a
7769
           formal temp.  FIXME do we care?  */
7770
        {
7771
          *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
7772
          if (TREE_CODE (TREE_TYPE (*expr_p)) == COMPLEX_TYPE
7773
              || TREE_CODE (TREE_TYPE (*expr_p)) == VECTOR_TYPE)
7774
            DECL_GIMPLE_REG_P (*expr_p) = 1;
7775
        }
7776
      else
7777
        *expr_p = get_formal_tmp_var (*expr_p, pre_p);
7778
    }
7779
  else
7780
    {
7781
#ifdef ENABLE_GIMPLE_CHECKING
7782
      if (!(fallback & fb_mayfail))
7783
        {
7784
          fprintf (stderr, "gimplification failed:\n");
7785
          print_generic_expr (stderr, *expr_p, 0);
7786
          debug_tree (*expr_p);
7787
          internal_error ("gimplification failed");
7788
        }
7789
#endif
7790
      gcc_assert (fallback & fb_mayfail);
7791
 
7792
      /* If this is an asm statement, and the user asked for the
7793
         impossible, don't die.  Fail and let gimplify_asm_expr
7794
         issue an error.  */
7795
      ret = GS_ERROR;
7796
      goto out;
7797
    }
7798
 
7799
  /* Make sure the temporary matches our predicate.  */
7800
  gcc_assert ((*gimple_test_f) (*expr_p));
7801
 
7802
  if (!gimple_seq_empty_p (internal_post))
7803
    {
7804
      annotate_all_with_location (internal_post, input_location);
7805
      gimplify_seq_add_seq (pre_p, internal_post);
7806
    }
7807
 
7808
 out:
7809
  input_location = saved_location;
7810
  return ret;
7811
}
7812
 
7813
/* Look through TYPE for variable-sized objects and gimplify each such
7814
   size that we find.  Add to LIST_P any statements generated.  */
7815
 
7816
void
7817
gimplify_type_sizes (tree type, gimple_seq *list_p)
7818
{
7819
  tree field, t;
7820
 
7821
  if (type == NULL || type == error_mark_node)
7822
    return;
7823
 
7824
  /* We first do the main variant, then copy into any other variants.  */
7825
  type = TYPE_MAIN_VARIANT (type);
7826
 
7827
  /* Avoid infinite recursion.  */
7828
  if (TYPE_SIZES_GIMPLIFIED (type))
7829
    return;
7830
 
7831
  TYPE_SIZES_GIMPLIFIED (type) = 1;
7832
 
7833
  switch (TREE_CODE (type))
7834
    {
7835
    case INTEGER_TYPE:
7836
    case ENUMERAL_TYPE:
7837
    case BOOLEAN_TYPE:
7838
    case REAL_TYPE:
7839
    case FIXED_POINT_TYPE:
7840
      gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
7841
      gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
7842
 
7843
      for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
7844
        {
7845
          TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
7846
          TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
7847
        }
7848
      break;
7849
 
7850
    case ARRAY_TYPE:
7851
      /* These types may not have declarations, so handle them here.  */
7852
      gimplify_type_sizes (TREE_TYPE (type), list_p);
7853
      gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
7854
      /* Ensure VLA bounds aren't removed, for -O0 they should be variables
7855
         with assigned stack slots, for -O1+ -g they should be tracked
7856
         by VTA.  */
7857
      if (!(TYPE_NAME (type)
7858
            && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
7859
            && DECL_IGNORED_P (TYPE_NAME (type)))
7860
          && TYPE_DOMAIN (type)
7861
          && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
7862
        {
7863
          t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
7864
          if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
7865
            DECL_IGNORED_P (t) = 0;
7866
          t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
7867
          if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
7868
            DECL_IGNORED_P (t) = 0;
7869
        }
7870
      break;
7871
 
7872
    case RECORD_TYPE:
7873
    case UNION_TYPE:
7874
    case QUAL_UNION_TYPE:
7875
      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
7876
        if (TREE_CODE (field) == FIELD_DECL)
7877
          {
7878
            gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
7879
            gimplify_one_sizepos (&DECL_SIZE (field), list_p);
7880
            gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
7881
            gimplify_type_sizes (TREE_TYPE (field), list_p);
7882
          }
7883
      break;
7884
 
7885
    case POINTER_TYPE:
7886
    case REFERENCE_TYPE:
7887
        /* We used to recurse on the pointed-to type here, which turned out to
7888
           be incorrect because its definition might refer to variables not
7889
           yet initialized at this point if a forward declaration is involved.
7890
 
7891
           It was actually useful for anonymous pointed-to types to ensure
7892
           that the sizes evaluation dominates every possible later use of the
7893
           values.  Restricting to such types here would be safe since there
7894
           is no possible forward declaration around, but would introduce an
7895
           undesirable middle-end semantic to anonymity.  We then defer to
7896
           front-ends the responsibility of ensuring that the sizes are
7897
           evaluated both early and late enough, e.g. by attaching artificial
7898
           type declarations to the tree.  */
7899
      break;
7900
 
7901
    default:
7902
      break;
7903
    }
7904
 
7905
  gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
7906
  gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
7907
 
7908
  for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
7909
    {
7910
      TYPE_SIZE (t) = TYPE_SIZE (type);
7911
      TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
7912
      TYPE_SIZES_GIMPLIFIED (t) = 1;
7913
    }
7914
}
7915
 
7916
/* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
7917
   a size or position, has had all of its SAVE_EXPRs evaluated.
7918
   We add any required statements to *STMT_P.  */
7919
 
7920
void
7921
gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
7922
{
7923
  tree type, expr = *expr_p;
7924
 
7925
  /* We don't do anything if the value isn't there, is constant, or contains
7926
     A PLACEHOLDER_EXPR.  We also don't want to do anything if it's already
7927
     a VAR_DECL.  If it's a VAR_DECL from another function, the gimplifier
7928
     will want to replace it with a new variable, but that will cause problems
7929
     if this type is from outside the function.  It's OK to have that here.  */
7930
  if (expr == NULL_TREE || TREE_CONSTANT (expr)
7931
      || TREE_CODE (expr) == VAR_DECL
7932
      || CONTAINS_PLACEHOLDER_P (expr))
7933
    return;
7934
 
7935
  type = TREE_TYPE (expr);
7936
  *expr_p = unshare_expr (expr);
7937
 
7938
  gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue);
7939
  expr = *expr_p;
7940
 
7941
  /* Verify that we've an exact type match with the original expression.
7942
     In particular, we do not wish to drop a "sizetype" in favour of a
7943
     type of similar dimensions.  We don't want to pollute the generic
7944
     type-stripping code with this knowledge because it doesn't matter
7945
     for the bulk of GENERIC/GIMPLE.  It only matters that TYPE_SIZE_UNIT
7946
     and friends retain their "sizetype-ness".  */
7947
  if (TREE_TYPE (expr) != type
7948
      && TREE_CODE (type) == INTEGER_TYPE
7949
      && TYPE_IS_SIZETYPE (type))
7950
    {
7951
      tree tmp;
7952
      gimple stmt;
7953
 
7954
      *expr_p = create_tmp_var (type, NULL);
7955
      tmp = build1 (NOP_EXPR, type, expr);
7956
      stmt = gimplify_assign (*expr_p, tmp, stmt_p);
7957
      gimple_set_location (stmt, EXPR_LOC_OR_HERE (expr));
7958
    }
7959
}
7960
 
7961
/* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
7962
   containing the sequence of corresponding GIMPLE statements.  If DO_PARMS
7963
   is true, also gimplify the parameters.  */
7964
 
7965
gimple
7966
gimplify_body (tree fndecl, bool do_parms)
7967
{
7968
  location_t saved_location = input_location;
7969
  gimple_seq parm_stmts, seq;
7970
  gimple outer_bind;
7971
  struct gimplify_ctx gctx;
7972
  struct cgraph_node *cgn;
7973
 
7974
  timevar_push (TV_TREE_GIMPLIFY);
7975
 
7976
  /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
7977
     gimplification.  */
7978
  default_rtl_profile ();
7979
 
7980
  gcc_assert (gimplify_ctxp == NULL);
7981
  push_gimplify_context (&gctx);
7982
 
7983
  /* Unshare most shared trees in the body and in that of any nested functions.
7984
     It would seem we don't have to do this for nested functions because
7985
     they are supposed to be output and then the outer function gimplified
7986
     first, but the g++ front end doesn't always do it that way.  */
7987
  unshare_body (fndecl);
7988
  unvisit_body (fndecl);
7989
 
7990
  cgn = cgraph_get_node (fndecl);
7991
  if (cgn && cgn->origin)
7992
    nonlocal_vlas = pointer_set_create ();
7993
 
7994
  /* Make sure input_location isn't set to something weird.  */
7995
  input_location = DECL_SOURCE_LOCATION (fndecl);
7996
 
7997
  /* Resolve callee-copies.  This has to be done before processing
7998
     the body so that DECL_VALUE_EXPR gets processed correctly.  */
7999
  parm_stmts = do_parms ? gimplify_parameters () : NULL;
8000
 
8001
  /* Gimplify the function's body.  */
8002
  seq = NULL;
8003
  gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
8004
  outer_bind = gimple_seq_first_stmt (seq);
8005
  if (!outer_bind)
8006
    {
8007
      outer_bind = gimple_build_nop ();
8008
      gimplify_seq_add_stmt (&seq, outer_bind);
8009
    }
8010
 
8011
  /* The body must contain exactly one statement, a GIMPLE_BIND.  If this is
8012
     not the case, wrap everything in a GIMPLE_BIND to make it so.  */
8013
  if (gimple_code (outer_bind) == GIMPLE_BIND
8014
      && gimple_seq_first (seq) == gimple_seq_last (seq))
8015
    ;
8016
  else
8017
    outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
8018
 
8019
  DECL_SAVED_TREE (fndecl) = NULL_TREE;
8020
 
8021
  /* If we had callee-copies statements, insert them at the beginning
8022
     of the function and clear DECL_VALUE_EXPR_P on the parameters.  */
8023
  if (!gimple_seq_empty_p (parm_stmts))
8024
    {
8025
      tree parm;
8026
 
8027
      gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
8028
      gimple_bind_set_body (outer_bind, parm_stmts);
8029
 
8030
      for (parm = DECL_ARGUMENTS (current_function_decl);
8031
           parm; parm = DECL_CHAIN (parm))
8032
        if (DECL_HAS_VALUE_EXPR_P (parm))
8033
          {
8034
            DECL_HAS_VALUE_EXPR_P (parm) = 0;
8035
            DECL_IGNORED_P (parm) = 0;
8036
          }
8037
    }
8038
 
8039
  if (nonlocal_vlas)
8040
    {
8041
      pointer_set_destroy (nonlocal_vlas);
8042
      nonlocal_vlas = NULL;
8043
    }
8044
 
8045
  pop_gimplify_context (outer_bind);
8046
  gcc_assert (gimplify_ctxp == NULL);
8047
 
8048
  if (!seen_error ())
8049
    verify_gimple_in_seq (gimple_bind_body (outer_bind));
8050
 
8051
  timevar_pop (TV_TREE_GIMPLIFY);
8052
  input_location = saved_location;
8053
 
8054
  return outer_bind;
8055
}
8056
 
8057
typedef char *char_p; /* For DEF_VEC_P.  */
8058
DEF_VEC_P(char_p);
8059
DEF_VEC_ALLOC_P(char_p,heap);
8060
 
8061
/* Return whether we should exclude FNDECL from instrumentation.  */
8062
 
8063
static bool
8064
flag_instrument_functions_exclude_p (tree fndecl)
8065
{
8066
  VEC(char_p,heap) *vec;
8067
 
8068
  vec = (VEC(char_p,heap) *) flag_instrument_functions_exclude_functions;
8069
  if (VEC_length (char_p, vec) > 0)
8070
    {
8071
      const char *name;
8072
      int i;
8073
      char *s;
8074
 
8075
      name = lang_hooks.decl_printable_name (fndecl, 0);
8076
      FOR_EACH_VEC_ELT (char_p, vec, i, s)
8077
        if (strstr (name, s) != NULL)
8078
          return true;
8079
    }
8080
 
8081
  vec = (VEC(char_p,heap) *) flag_instrument_functions_exclude_files;
8082
  if (VEC_length (char_p, vec) > 0)
8083
    {
8084
      const char *name;
8085
      int i;
8086
      char *s;
8087
 
8088
      name = DECL_SOURCE_FILE (fndecl);
8089
      FOR_EACH_VEC_ELT (char_p, vec, i, s)
8090
        if (strstr (name, s) != NULL)
8091
          return true;
8092
    }
8093
 
8094
  return false;
8095
}
8096
 
8097
/* Entry point to the gimplification pass.  FNDECL is the FUNCTION_DECL
8098
   node for the function we want to gimplify.
8099
 
8100
   Return the sequence of GIMPLE statements corresponding to the body
8101
   of FNDECL.  */
8102
 
8103
void
8104
gimplify_function_tree (tree fndecl)
8105
{
8106
  tree oldfn, parm, ret;
8107
  gimple_seq seq;
8108
  gimple bind;
8109
 
8110
  gcc_assert (!gimple_body (fndecl));
8111
 
8112
  oldfn = current_function_decl;
8113
  current_function_decl = fndecl;
8114
  if (DECL_STRUCT_FUNCTION (fndecl))
8115
    push_cfun (DECL_STRUCT_FUNCTION (fndecl));
8116
  else
8117
    push_struct_function (fndecl);
8118
 
8119
  for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
8120
    {
8121
      /* Preliminarily mark non-addressed complex variables as eligible
8122
         for promotion to gimple registers.  We'll transform their uses
8123
         as we find them.  */
8124
      if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
8125
           || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
8126
          && !TREE_THIS_VOLATILE (parm)
8127
          && !needs_to_live_in_memory (parm))
8128
        DECL_GIMPLE_REG_P (parm) = 1;
8129
    }
8130
 
8131
  ret = DECL_RESULT (fndecl);
8132
  if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
8133
       || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
8134
      && !needs_to_live_in_memory (ret))
8135
    DECL_GIMPLE_REG_P (ret) = 1;
8136
 
8137
  bind = gimplify_body (fndecl, true);
8138
 
8139
  /* The tree body of the function is no longer needed, replace it
8140
     with the new GIMPLE body.  */
8141
  seq = gimple_seq_alloc ();
8142
  gimple_seq_add_stmt (&seq, bind);
8143
  gimple_set_body (fndecl, seq);
8144
 
8145
  /* If we're instrumenting function entry/exit, then prepend the call to
8146
     the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
8147
     catch the exit hook.  */
8148
  /* ??? Add some way to ignore exceptions for this TFE.  */
8149
  if (flag_instrument_function_entry_exit
8150
      && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
8151
      && !flag_instrument_functions_exclude_p (fndecl))
8152
    {
8153
      tree x;
8154
      gimple new_bind;
8155
      gimple tf;
8156
      gimple_seq cleanup = NULL, body = NULL;
8157
      tree tmp_var;
8158
      gimple call;
8159
 
8160
      x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
8161
      call = gimple_build_call (x, 1, integer_zero_node);
8162
      tmp_var = create_tmp_var (ptr_type_node, "return_addr");
8163
      gimple_call_set_lhs (call, tmp_var);
8164
      gimplify_seq_add_stmt (&cleanup, call);
8165
      x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
8166
      call = gimple_build_call (x, 2,
8167
                                build_fold_addr_expr (current_function_decl),
8168
                                tmp_var);
8169
      gimplify_seq_add_stmt (&cleanup, call);
8170
      tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
8171
 
8172
      x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
8173
      call = gimple_build_call (x, 1, integer_zero_node);
8174
      tmp_var = create_tmp_var (ptr_type_node, "return_addr");
8175
      gimple_call_set_lhs (call, tmp_var);
8176
      gimplify_seq_add_stmt (&body, call);
8177
      x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
8178
      call = gimple_build_call (x, 2,
8179
                                build_fold_addr_expr (current_function_decl),
8180
                                tmp_var);
8181
      gimplify_seq_add_stmt (&body, call);
8182
      gimplify_seq_add_stmt (&body, tf);
8183
      new_bind = gimple_build_bind (NULL, body, gimple_bind_block (bind));
8184
      /* Clear the block for BIND, since it is no longer directly inside
8185
         the function, but within a try block.  */
8186
      gimple_bind_set_block (bind, NULL);
8187
 
8188
      /* Replace the current function body with the body
8189
         wrapped in the try/finally TF.  */
8190
      seq = gimple_seq_alloc ();
8191
      gimple_seq_add_stmt (&seq, new_bind);
8192
      gimple_set_body (fndecl, seq);
8193
    }
8194
 
8195
  DECL_SAVED_TREE (fndecl) = NULL_TREE;
8196
  cfun->curr_properties = PROP_gimple_any;
8197
 
8198
  current_function_decl = oldfn;
8199
  pop_cfun ();
8200
}
8201
 
8202
/* Some transformations like inlining may invalidate the GIMPLE form
8203
   for operands.  This function traverses all the operands in STMT and
8204
   gimplifies anything that is not a valid gimple operand.  Any new
8205
   GIMPLE statements are inserted before *GSI_P.  */
8206
 
8207
void
8208
gimple_regimplify_operands (gimple stmt, gimple_stmt_iterator *gsi_p)
8209
{
8210
  size_t i, num_ops;
8211
  tree orig_lhs = NULL_TREE, lhs, t;
8212
  gimple_seq pre = NULL;
8213
  gimple post_stmt = NULL;
8214
  struct gimplify_ctx gctx;
8215
 
8216
  push_gimplify_context (&gctx);
8217
  gimplify_ctxp->into_ssa = gimple_in_ssa_p (cfun);
8218
 
8219
  switch (gimple_code (stmt))
8220
    {
8221
    case GIMPLE_COND:
8222
      gimplify_expr (gimple_cond_lhs_ptr (stmt), &pre, NULL,
8223
                     is_gimple_val, fb_rvalue);
8224
      gimplify_expr (gimple_cond_rhs_ptr (stmt), &pre, NULL,
8225
                     is_gimple_val, fb_rvalue);
8226
      break;
8227
    case GIMPLE_SWITCH:
8228
      gimplify_expr (gimple_switch_index_ptr (stmt), &pre, NULL,
8229
                     is_gimple_val, fb_rvalue);
8230
      break;
8231
    case GIMPLE_OMP_ATOMIC_LOAD:
8232
      gimplify_expr (gimple_omp_atomic_load_rhs_ptr (stmt), &pre, NULL,
8233
                     is_gimple_val, fb_rvalue);
8234
      break;
8235
    case GIMPLE_ASM:
8236
      {
8237
        size_t i, noutputs = gimple_asm_noutputs (stmt);
8238
        const char *constraint, **oconstraints;
8239
        bool allows_mem, allows_reg, is_inout;
8240
 
8241
        oconstraints
8242
          = (const char **) alloca ((noutputs) * sizeof (const char *));
8243
        for (i = 0; i < noutputs; i++)
8244
          {
8245
            tree op = gimple_asm_output_op (stmt, i);
8246
            constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
8247
            oconstraints[i] = constraint;
8248
            parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
8249
                                     &allows_reg, &is_inout);
8250
            gimplify_expr (&TREE_VALUE (op), &pre, NULL,
8251
                           is_inout ? is_gimple_min_lval : is_gimple_lvalue,
8252
                           fb_lvalue | fb_mayfail);
8253
          }
8254
        for (i = 0; i < gimple_asm_ninputs (stmt); i++)
8255
          {
8256
            tree op = gimple_asm_input_op (stmt, i);
8257
            constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
8258
            parse_input_constraint (&constraint, 0, 0, noutputs, 0,
8259
                                    oconstraints, &allows_mem, &allows_reg);
8260
            if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (op))) && allows_mem)
8261
              allows_reg = 0;
8262
            if (!allows_reg && allows_mem)
8263
              gimplify_expr (&TREE_VALUE (op), &pre, NULL,
8264
                             is_gimple_lvalue, fb_lvalue | fb_mayfail);
8265
            else
8266
              gimplify_expr (&TREE_VALUE (op), &pre, NULL,
8267
                             is_gimple_asm_val, fb_rvalue);
8268
          }
8269
      }
8270
      break;
8271
    default:
8272
      /* NOTE: We start gimplifying operands from last to first to
8273
         make sure that side-effects on the RHS of calls, assignments
8274
         and ASMs are executed before the LHS.  The ordering is not
8275
         important for other statements.  */
8276
      num_ops = gimple_num_ops (stmt);
8277
      orig_lhs = gimple_get_lhs (stmt);
8278
      for (i = num_ops; i > 0; i--)
8279
        {
8280
          tree op = gimple_op (stmt, i - 1);
8281
          if (op == NULL_TREE)
8282
            continue;
8283
          if (i == 1 && (is_gimple_call (stmt) || is_gimple_assign (stmt)))
8284
            gimplify_expr (&op, &pre, NULL, is_gimple_lvalue, fb_lvalue);
8285
          else if (i == 2
8286
                   && is_gimple_assign (stmt)
8287
                   && num_ops == 2
8288
                   && get_gimple_rhs_class (gimple_expr_code (stmt))
8289
                      == GIMPLE_SINGLE_RHS)
8290
            gimplify_expr (&op, &pre, NULL,
8291
                           rhs_predicate_for (gimple_assign_lhs (stmt)),
8292
                           fb_rvalue);
8293
          else if (i == 2 && is_gimple_call (stmt))
8294
            {
8295
              if (TREE_CODE (op) == FUNCTION_DECL)
8296
                continue;
8297
              gimplify_expr (&op, &pre, NULL, is_gimple_call_addr, fb_rvalue);
8298
            }
8299
          else
8300
            gimplify_expr (&op, &pre, NULL, is_gimple_val, fb_rvalue);
8301
          gimple_set_op (stmt, i - 1, op);
8302
        }
8303
 
8304
      lhs = gimple_get_lhs (stmt);
8305
      /* If the LHS changed it in a way that requires a simple RHS,
8306
         create temporary.  */
8307
      if (lhs && !is_gimple_reg (lhs))
8308
        {
8309
          bool need_temp = false;
8310
 
8311
          if (is_gimple_assign (stmt)
8312
              && num_ops == 2
8313
              && get_gimple_rhs_class (gimple_expr_code (stmt))
8314
                 == GIMPLE_SINGLE_RHS)
8315
            gimplify_expr (gimple_assign_rhs1_ptr (stmt), &pre, NULL,
8316
                           rhs_predicate_for (gimple_assign_lhs (stmt)),
8317
                           fb_rvalue);
8318
          else if (is_gimple_reg (lhs))
8319
            {
8320
              if (is_gimple_reg_type (TREE_TYPE (lhs)))
8321
                {
8322
                  if (is_gimple_call (stmt))
8323
                    {
8324
                      i = gimple_call_flags (stmt);
8325
                      if ((i & ECF_LOOPING_CONST_OR_PURE)
8326
                          || !(i & (ECF_CONST | ECF_PURE)))
8327
                        need_temp = true;
8328
                    }
8329
                  if (stmt_can_throw_internal (stmt))
8330
                    need_temp = true;
8331
                }
8332
            }
8333
          else
8334
            {
8335
              if (is_gimple_reg_type (TREE_TYPE (lhs)))
8336
                need_temp = true;
8337
              else if (TYPE_MODE (TREE_TYPE (lhs)) != BLKmode)
8338
                {
8339
                  if (is_gimple_call (stmt))
8340
                    {
8341
                      tree fndecl = gimple_call_fndecl (stmt);
8342
 
8343
                      if (!aggregate_value_p (TREE_TYPE (lhs), fndecl)
8344
                          && !(fndecl && DECL_RESULT (fndecl)
8345
                               && DECL_BY_REFERENCE (DECL_RESULT (fndecl))))
8346
                        need_temp = true;
8347
                    }
8348
                  else
8349
                    need_temp = true;
8350
                }
8351
            }
8352
          if (need_temp)
8353
            {
8354
              tree temp = create_tmp_reg (TREE_TYPE (lhs), NULL);
8355
 
8356
              if (TREE_CODE (orig_lhs) == SSA_NAME)
8357
                orig_lhs = SSA_NAME_VAR (orig_lhs);
8358
 
8359
              if (gimple_in_ssa_p (cfun))
8360
                temp = make_ssa_name (temp, NULL);
8361
              gimple_set_lhs (stmt, temp);
8362
              post_stmt = gimple_build_assign (lhs, temp);
8363
              if (TREE_CODE (lhs) == SSA_NAME)
8364
                SSA_NAME_DEF_STMT (lhs) = post_stmt;
8365
            }
8366
        }
8367
      break;
8368
    }
8369
 
8370
  if (gimple_referenced_vars (cfun))
8371
    for (t = gimplify_ctxp->temps; t ; t = TREE_CHAIN (t))
8372
      add_referenced_var (t);
8373
 
8374
  if (!gimple_seq_empty_p (pre))
8375
    {
8376
      if (gimple_in_ssa_p (cfun))
8377
        {
8378
          gimple_stmt_iterator i;
8379
 
8380
          for (i = gsi_start (pre); !gsi_end_p (i); gsi_next (&i))
8381
            mark_symbols_for_renaming (gsi_stmt (i));
8382
        }
8383
      gsi_insert_seq_before (gsi_p, pre, GSI_SAME_STMT);
8384
    }
8385
  if (post_stmt)
8386
    gsi_insert_after (gsi_p, post_stmt, GSI_NEW_STMT);
8387
 
8388
  pop_gimplify_context (NULL);
8389
}
8390
 
8391
/* Expand EXPR to list of gimple statements STMTS.  GIMPLE_TEST_F specifies
8392
   the predicate that will hold for the result.  If VAR is not NULL, make the
8393
   base variable of the final destination be VAR if suitable.  */
8394
 
8395
tree
8396
force_gimple_operand_1 (tree expr, gimple_seq *stmts,
8397
                        gimple_predicate gimple_test_f, tree var)
8398
{
8399
  tree t;
8400
  enum gimplify_status ret;
8401
  struct gimplify_ctx gctx;
8402
 
8403
  *stmts = NULL;
8404
 
8405
  /* gimple_test_f might be more strict than is_gimple_val, make
8406
     sure we pass both.  Just checking gimple_test_f doesn't work
8407
     because most gimple predicates do not work recursively.  */
8408
  if (is_gimple_val (expr)
8409
      && (*gimple_test_f) (expr))
8410
    return expr;
8411
 
8412
  push_gimplify_context (&gctx);
8413
  gimplify_ctxp->into_ssa = gimple_in_ssa_p (cfun);
8414
  gimplify_ctxp->allow_rhs_cond_expr = true;
8415
 
8416
  if (var)
8417
    expr = build2 (MODIFY_EXPR, TREE_TYPE (var), var, expr);
8418
 
8419
  if (TREE_CODE (expr) != MODIFY_EXPR
8420
      && TREE_TYPE (expr) == void_type_node)
8421
    {
8422
      gimplify_and_add (expr, stmts);
8423
      expr = NULL_TREE;
8424
    }
8425
  else
8426
    {
8427
      ret = gimplify_expr (&expr, stmts, NULL, gimple_test_f, fb_rvalue);
8428
      gcc_assert (ret != GS_ERROR);
8429
    }
8430
 
8431
  if (gimple_referenced_vars (cfun))
8432
    for (t = gimplify_ctxp->temps; t ; t = DECL_CHAIN (t))
8433
      add_referenced_var (t);
8434
 
8435
  pop_gimplify_context (NULL);
8436
 
8437
  return expr;
8438
}
8439
 
8440
/* Expand EXPR to list of gimple statements STMTS.  If SIMPLE is true,
8441
   force the result to be either ssa_name or an invariant, otherwise
8442
   just force it to be a rhs expression.  If VAR is not NULL, make the
8443
   base variable of the final destination be VAR if suitable.  */
8444
 
8445
tree
8446
force_gimple_operand (tree expr, gimple_seq *stmts, bool simple, tree var)
8447
{
8448
  return force_gimple_operand_1 (expr, stmts,
8449
                                 simple ? is_gimple_val : is_gimple_reg_rhs,
8450
                                 var);
8451
}
8452
 
8453
/* Invoke force_gimple_operand_1 for EXPR with parameters GIMPLE_TEST_F
8454
   and VAR.  If some statements are produced, emits them at GSI.
8455
   If BEFORE is true.  the statements are appended before GSI, otherwise
8456
   they are appended after it.  M specifies the way GSI moves after
8457
   insertion (GSI_SAME_STMT or GSI_CONTINUE_LINKING are the usual values).  */
8458
 
8459
tree
8460
force_gimple_operand_gsi_1 (gimple_stmt_iterator *gsi, tree expr,
8461
                            gimple_predicate gimple_test_f,
8462
                            tree var, bool before,
8463
                            enum gsi_iterator_update m)
8464
{
8465
  gimple_seq stmts;
8466
 
8467
  expr = force_gimple_operand_1 (expr, &stmts, gimple_test_f, var);
8468
 
8469
  if (!gimple_seq_empty_p (stmts))
8470
    {
8471
      if (gimple_in_ssa_p (cfun))
8472
        {
8473
          gimple_stmt_iterator i;
8474
 
8475
          for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
8476
            mark_symbols_for_renaming (gsi_stmt (i));
8477
        }
8478
 
8479
      if (before)
8480
        gsi_insert_seq_before (gsi, stmts, m);
8481
      else
8482
        gsi_insert_seq_after (gsi, stmts, m);
8483
    }
8484
 
8485
  return expr;
8486
}
8487
 
8488
/* Invoke force_gimple_operand_1 for EXPR with parameter VAR.
8489
   If SIMPLE is true, force the result to be either ssa_name or an invariant,
8490
   otherwise just force it to be a rhs expression.  If some statements are
8491
   produced, emits them at GSI.  If BEFORE is true, the statements are
8492
   appended before GSI, otherwise they are appended after it.  M specifies
8493
   the way GSI moves after insertion (GSI_SAME_STMT or GSI_CONTINUE_LINKING
8494
   are the usual values).  */
8495
 
8496
tree
8497
force_gimple_operand_gsi (gimple_stmt_iterator *gsi, tree expr,
8498
                          bool simple_p, tree var, bool before,
8499
                          enum gsi_iterator_update m)
8500
{
8501
  return force_gimple_operand_gsi_1 (gsi, expr,
8502
                                     simple_p
8503
                                     ? is_gimple_val : is_gimple_reg_rhs,
8504
                                     var, before, m);
8505
}
8506
 
8507
 
8508
#include "gt-gimplify.h"

powered by: WebSVN 2.1.0

© copyright 1999-2024 OpenCores.org, equivalent to Oliscience, all rights reserved. OpenCores®, registered trademark.