OpenCores
URL https://opencores.org/ocsvn/openrisc/openrisc/trunk

Subversion Repositories openrisc

openrisc/trunk/gnu-stable/gcc-4.5.1/gcc/gimplify.c - Blame information for rev 847


Line No. Rev Author Line
1 280 jeremybenn
/* Tree lowering pass.  This pass converts the GENERIC functions-as-trees
2
   tree representation into the GIMPLE form.
3
   Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4
   Free Software Foundation, Inc.
5
   Major work done by Sebastian Pop <s.pop@laposte.net>,
6
   Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
7
 
8
This file is part of GCC.
9
 
10
GCC is free software; you can redistribute it and/or modify it under
11
the terms of the GNU General Public License as published by the Free
12
Software Foundation; either version 3, or (at your option) any later
13
version.
14
 
15
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16
WARRANTY; without even the implied warranty of MERCHANTABILITY or
17
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
18
for more details.
19
 
20
You should have received a copy of the GNU General Public License
21
along with GCC; see the file COPYING3.  If not see
22
<http://www.gnu.org/licenses/>.  */
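
/* As a rough illustration of what this pass produces (not the output of
   any single function below): a GENERIC statement such as

     a = b + c * d;

   is lowered into GIMPLE's three-address form by introducing an
   artificial temporary for the nested computation, roughly

     T.1 = c * d;
     a = b + T.1;

   where T.1 stands for a compiler-generated temporary; the actual names
   come from create_tmp_var_name below.  */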
23
 
24
#include "config.h"
25
#include "system.h"
26
#include "coretypes.h"
27
#include "tm.h"
28
#include "tree.h"
29
#include "rtl.h"
30
#include "varray.h"
31
#include "gimple.h"
32
#include "tree-iterator.h"
33
#include "tree-inline.h"
34
#include "diagnostic.h"
35
#include "langhooks.h"
36
#include "langhooks-def.h"
37
#include "tree-flow.h"
38
#include "cgraph.h"
39
#include "timevar.h"
40
#include "except.h"
41
#include "hashtab.h"
42
#include "flags.h"
43
#include "real.h"
44
#include "function.h"
45
#include "output.h"
46
#include "expr.h"
47
#include "ggc.h"
48
#include "toplev.h"
49
#include "target.h"
50
#include "optabs.h"
51
#include "pointer-set.h"
52
#include "splay-tree.h"
53
#include "vec.h"
54
#include "gimple.h"
55
#include "tree-pass.h"
56
 
57
 
58
enum gimplify_omp_var_data
59
{
60
  GOVD_SEEN = 1,
61
  GOVD_EXPLICIT = 2,
62
  GOVD_SHARED = 4,
63
  GOVD_PRIVATE = 8,
64
  GOVD_FIRSTPRIVATE = 16,
65
  GOVD_LASTPRIVATE = 32,
66
  GOVD_REDUCTION = 64,
67
  GOVD_LOCAL = 128,
68
  GOVD_DEBUG_PRIVATE = 256,
69
  GOVD_PRIVATE_OUTER_REF = 512,
70
  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
71
                           | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LOCAL)
72
};
73
 
74
 
75
enum omp_region_type
76
{
77
  ORT_WORKSHARE = 0,
78
  ORT_PARALLEL = 2,
79
  ORT_COMBINED_PARALLEL = 3,
80
  ORT_TASK = 4,
81
  ORT_UNTIED_TASK = 5
82
};
83
 
84
struct gimplify_omp_ctx
85
{
86
  struct gimplify_omp_ctx *outer_context;
87
  splay_tree variables;
88
  struct pointer_set_t *privatized_types;
89
  location_t location;
90
  enum omp_clause_default_kind default_kind;
91
  enum omp_region_type region_type;
92
};
93
 
94
static struct gimplify_ctx *gimplify_ctxp;
95
static struct gimplify_omp_ctx *gimplify_omp_ctxp;
96
 
97
 
98
/* Formal (expression) temporary table handling: Multiple occurrences of
99
   the same scalar expression are evaluated into the same temporary.  */
100
 
101
typedef struct gimple_temp_hash_elt
102
{
103
  tree val;   /* Key */
104
  tree temp;  /* Value */
105
} elt_t;
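
/* Illustrative sketch only: with optimization enabled, gimplifying a
   statement like "a[i + 1] = b[i + 1]" can evaluate both occurrences of
   the formal expression i + 1 into one temporary, roughly

     T.1 = i + 1;
     a[T.1] = b[T.1];

   lookup_tmp_var below consults this table to decide whether such a
   temporary already exists; the exact temporaries depend on the input
   and on the optimization level.  */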
106
 
107
/* Forward declarations.  */
108
static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
109
 
110
/* Mark X addressable.  Unlike the langhook we expect X to be in gimple
111
   form and we don't do any syntax checking.  */
112
void
113
mark_addressable (tree x)
114
{
115
  while (handled_component_p (x))
116
    x = TREE_OPERAND (x, 0);
117
  if (TREE_CODE (x) != VAR_DECL
118
      && TREE_CODE (x) != PARM_DECL
119
      && TREE_CODE (x) != RESULT_DECL)
120
    return;
121
  TREE_ADDRESSABLE (x) = 1;
122
}
123
 
124
/* Return a hash value for a formal temporary table entry.  */
125
 
126
static hashval_t
127
gimple_tree_hash (const void *p)
128
{
129
  tree t = ((const elt_t *) p)->val;
130
  return iterative_hash_expr (t, 0);
131
}
132
 
133
/* Compare two formal temporary table entries.  */
134
 
135
static int
136
gimple_tree_eq (const void *p1, const void *p2)
137
{
138
  tree t1 = ((const elt_t *) p1)->val;
139
  tree t2 = ((const elt_t *) p2)->val;
140
  enum tree_code code = TREE_CODE (t1);
141
 
142
  if (TREE_CODE (t2) != code
143
      || TREE_TYPE (t1) != TREE_TYPE (t2))
144
    return 0;
145
 
146
  if (!operand_equal_p (t1, t2, 0))
147
    return 0;
148
 
149
  /* Only allow them to compare equal if they also hash equal; otherwise
150
     results are nondeterministic, and we fail bootstrap comparison.  */
151
  gcc_assert (gimple_tree_hash (p1) == gimple_tree_hash (p2));
152
 
153
  return 1;
154
}
155
 
156
/* Link gimple statement GS to the end of the sequence *SEQ_P.  If
157
   *SEQ_P is NULL, a new sequence is allocated.  This function is
158
   similar to gimple_seq_add_stmt, but does not scan the operands.
159
   During gimplification, we need to manipulate statement sequences
160
   before the def/use vectors have been constructed.  */
161
 
162
void
163
gimplify_seq_add_stmt (gimple_seq *seq_p, gimple gs)
164
{
165
  gimple_stmt_iterator si;
166
 
167
  if (gs == NULL)
168
    return;
169
 
170
  if (*seq_p == NULL)
171
    *seq_p = gimple_seq_alloc ();
172
 
173
  si = gsi_last (*seq_p);
174
 
175
  gsi_insert_after_without_update (&si, gs, GSI_NEW_STMT);
176
}
177
 
178
/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
179
   NULL, a new sequence is allocated.   This function is
180
   similar to gimple_seq_add_seq, but does not scan the operands.
181
   During gimplification, we need to manipulate statement sequences
182
   before the def/use vectors have been constructed.  */
183
 
184
static void
185
gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
186
{
187
  gimple_stmt_iterator si;
188
 
189
  if (src == NULL)
190
    return;
191
 
192
  if (*dst_p == NULL)
193
    *dst_p = gimple_seq_alloc ();
194
 
195
  si = gsi_last (*dst_p);
196
  gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
197
}
198
 
199
/* Set up a context for the gimplifier.  */
200
 
201
void
202
push_gimplify_context (struct gimplify_ctx *c)
203
{
204
  memset (c, '\0', sizeof (*c));
205
  c->prev_context = gimplify_ctxp;
206
  gimplify_ctxp = c;
207
}
208
 
209
/* Tear down a context for the gimplifier.  If BODY is non-null, then
210
   put the temporaries into the outer BIND_EXPR.  Otherwise, put them
211
   in the local_decls.
212
 
213
   BODY is not a sequence, but the first tuple in a sequence.  */
214
 
215
void
216
pop_gimplify_context (gimple body)
217
{
218
  struct gimplify_ctx *c = gimplify_ctxp;
219
 
220
  gcc_assert (c && (c->bind_expr_stack == NULL
221
                    || VEC_empty (gimple, c->bind_expr_stack)));
222
  VEC_free (gimple, heap, c->bind_expr_stack);
223
  gimplify_ctxp = c->prev_context;
224
 
225
  if (body)
226
    declare_vars (c->temps, body, false);
227
  else
228
    record_vars (c->temps);
229
 
230
  if (c->temp_htab)
231
    htab_delete (c->temp_htab);
232
}
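
/* Typical usage of the push/pop pair, as in gimplify_body and the
   OpenMP lowering code (an illustrative sketch, not a verbatim quote of
   either caller):

     struct gimplify_ctx gctx;

     push_gimplify_context (&gctx);
     ... gimplify statements, creating temporaries ...
     pop_gimplify_context (outer_bind);

   Passing the outermost GIMPLE_BIND hands the accumulated temporaries
   to declare_vars; passing NULL records them with record_vars
   instead.  */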
233
 
234
static void
235
gimple_push_bind_expr (gimple gimple_bind)
236
{
237
  if (gimplify_ctxp->bind_expr_stack == NULL)
238
    gimplify_ctxp->bind_expr_stack = VEC_alloc (gimple, heap, 8);
239
  VEC_safe_push (gimple, heap, gimplify_ctxp->bind_expr_stack, gimple_bind);
240
}
241
 
242
static void
243
gimple_pop_bind_expr (void)
244
{
245
  VEC_pop (gimple, gimplify_ctxp->bind_expr_stack);
246
}
247
 
248
gimple
249
gimple_current_bind_expr (void)
250
{
251
  return VEC_last (gimple, gimplify_ctxp->bind_expr_stack);
252
}
253
 
254
/* Return the stack of GIMPLE_BINDs created during gimplification.  */
255
 
256
VEC(gimple, heap) *
257
gimple_bind_expr_stack (void)
258
{
259
  return gimplify_ctxp->bind_expr_stack;
260
}
261
 
262
/* Returns true iff there is a COND_EXPR between us and the innermost
263
   CLEANUP_POINT_EXPR.  This info is used by gimple_push_cleanup.  */
264
 
265
static bool
266
gimple_conditional_context (void)
267
{
268
  return gimplify_ctxp->conditions > 0;
269
}
270
 
271
/* Note that we've entered a COND_EXPR.  */
272
 
273
static void
274
gimple_push_condition (void)
275
{
276
#ifdef ENABLE_GIMPLE_CHECKING
277
  if (gimplify_ctxp->conditions == 0)
278
    gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
279
#endif
280
  ++(gimplify_ctxp->conditions);
281
}
282
 
283
/* Note that we've left a COND_EXPR.  If we're back at unconditional scope
284
   now, add any conditional cleanups we've seen to the prequeue.  */
285
 
286
static void
287
gimple_pop_condition (gimple_seq *pre_p)
288
{
289
  int conds = --(gimplify_ctxp->conditions);
290
 
291
  gcc_assert (conds >= 0);
292
  if (conds == 0)
293
    {
294
      gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
295
      gimplify_ctxp->conditional_cleanups = NULL;
296
    }
297
}
298
 
299
/* A stable comparison routine for use with splay trees and DECLs.  */
300
 
301
static int
302
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
303
{
304
  tree a = (tree) xa;
305
  tree b = (tree) xb;
306
 
307
  return DECL_UID (a) - DECL_UID (b);
308
}
309
 
310
/* Create a new omp construct that deals with variable remapping.  */
311
 
312
static struct gimplify_omp_ctx *
313
new_omp_context (enum omp_region_type region_type)
314
{
315
  struct gimplify_omp_ctx *c;
316
 
317
  c = XCNEW (struct gimplify_omp_ctx);
318
  c->outer_context = gimplify_omp_ctxp;
319
  c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
320
  c->privatized_types = pointer_set_create ();
321
  c->location = input_location;
322
  c->region_type = region_type;
323
  if ((region_type & ORT_TASK) == 0)
324
    c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
325
  else
326
    c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
327
 
328
  return c;
329
}
330
 
331
/* Destroy an omp construct that deals with variable remapping.  */
332
 
333
static void
334
delete_omp_context (struct gimplify_omp_ctx *c)
335
{
336
  splay_tree_delete (c->variables);
337
  pointer_set_destroy (c->privatized_types);
338
  XDELETE (c);
339
}
340
 
341
static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
342
static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
343
 
344
/* A subroutine of append_to_statement_list{,_force}.  T is not NULL.  */
345
 
346
static void
347
append_to_statement_list_1 (tree t, tree *list_p)
348
{
349
  tree list = *list_p;
350
  tree_stmt_iterator i;
351
 
352
  if (!list)
353
    {
354
      if (t && TREE_CODE (t) == STATEMENT_LIST)
355
        {
356
          *list_p = t;
357
          return;
358
        }
359
      *list_p = list = alloc_stmt_list ();
360
    }
361
 
362
  i = tsi_last (list);
363
  tsi_link_after (&i, t, TSI_CONTINUE_LINKING);
364
}
365
 
366
/* Add T to the end of the list container pointed to by LIST_P.
367
   If T is an expression with no effects, it is ignored.  */
368
 
369
void
370
append_to_statement_list (tree t, tree *list_p)
371
{
372
  if (t && TREE_SIDE_EFFECTS (t))
373
    append_to_statement_list_1 (t, list_p);
374
}
375
 
376
/* Similar, but the statement is always added, regardless of side effects.  */
377
 
378
void
379
append_to_statement_list_force (tree t, tree *list_p)
380
{
381
  if (t != NULL_TREE)
382
    append_to_statement_list_1 (t, list_p);
383
}
384
 
385
/* Both gimplify the statement T and append it to *SEQ_P.  This function
386
   behaves exactly as gimplify_stmt, but you don't have to pass T as a
387
   reference.  */
388
 
389
void
390
gimplify_and_add (tree t, gimple_seq *seq_p)
391
{
392
  gimplify_stmt (&t, seq_p);
393
}
394
 
395
/* Gimplify statement T into sequence *SEQ_P, and return the first
396
   tuple in the sequence of generated tuples for this statement.
397
   Return NULL if gimplifying T produced no tuples.  */
398
 
399
static gimple
400
gimplify_and_return_first (tree t, gimple_seq *seq_p)
401
{
402
  gimple_stmt_iterator last = gsi_last (*seq_p);
403
 
404
  gimplify_and_add (t, seq_p);
405
 
406
  if (!gsi_end_p (last))
407
    {
408
      gsi_next (&last);
409
      return gsi_stmt (last);
410
    }
411
  else
412
    return gimple_seq_first_stmt (*seq_p);
413
}
414
 
415
/* Strip off a legitimate source ending from the input string NAME of
416
   length LEN.  Rather than having to know the names used by all of
417
   our front ends, we strip off an ending of a period followed by
418
   up to five characters.  (Java uses ".class".)  */
419
 
420
static inline void
421
remove_suffix (char *name, int len)
422
{
423
  int i;
424
 
425
  for (i = 2;  i < 8 && len > i;  i++)
426
    {
427
      if (name[len - i] == '.')
428
        {
429
          name[len - i] = '\0';
430
          break;
431
        }
432
    }
433
}
434
 
435
/* Create a new temporary name with PREFIX.  Returns an identifier.  */
436
 
437
static GTY(()) unsigned int tmp_var_id_num;
438
 
439
tree
440
create_tmp_var_name (const char *prefix)
441
{
442
  char *tmp_name;
443
 
444
  if (prefix)
445
    {
446
      char *preftmp = ASTRDUP (prefix);
447
 
448
      remove_suffix (preftmp, strlen (preftmp));
449
      prefix = preftmp;
450
    }
451
 
452
  ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix ? prefix : "T", tmp_var_id_num++);
453
  return get_identifier (tmp_name);
454
}
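
/* For instance, create_tmp_var_name ("len") typically yields an
   identifier along the lines of "len.42", and a NULL prefix something
   like "T.43"; the exact spelling is decided by the target's
   ASM_FORMAT_PRIVATE_NAME, so treat these names as examples only.  */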
455
 
456
 
457
/* Create a new temporary variable declaration of type TYPE.
458
   Does NOT push it into the current binding.  */
459
 
460
tree
461
create_tmp_var_raw (tree type, const char *prefix)
462
{
463
  tree tmp_var;
464
  tree new_type;
465
 
466
  /* Make the type of the variable writable.  */
467
  new_type = build_type_variant (type, 0, 0);
468
  TYPE_ATTRIBUTES (new_type) = TYPE_ATTRIBUTES (type);
469
 
470
  tmp_var = build_decl (input_location,
471
                        VAR_DECL, prefix ? create_tmp_var_name (prefix) : NULL,
472
                        type);
473
 
474
  /* The variable was declared by the compiler.  */
475
  DECL_ARTIFICIAL (tmp_var) = 1;
476
  /* And we don't want debug info for it.  */
477
  DECL_IGNORED_P (tmp_var) = 1;
478
 
479
  /* Make the variable writable.  */
480
  TREE_READONLY (tmp_var) = 0;
481
 
482
  DECL_EXTERNAL (tmp_var) = 0;
483
  TREE_STATIC (tmp_var) = 0;
484
  TREE_USED (tmp_var) = 1;
485
 
486
  return tmp_var;
487
}
488
 
489
/* Create a new temporary variable declaration of type TYPE.  DOES push the
490
   variable into the current binding.  Further, assume that this is called
491
   only from gimplification or optimization, at which point the creation of
492
   certain types is a bug.  */
493
 
494
tree
495
create_tmp_var (tree type, const char *prefix)
496
{
497
  tree tmp_var;
498
 
499
  /* We don't allow types that are addressable (meaning we can't make copies),
500
     or incomplete.  We also used to reject all variable-size objects here,
501
     but now support those for which a constant upper bound can be obtained.
502
     The processing for variable sizes is performed in gimple_add_tmp_var,
503
     the point at which it really matters and which is possibly reached via paths not going
504
     through this function, e.g. after direct calls to create_tmp_var_raw.  */
505
  gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));
506
 
507
  tmp_var = create_tmp_var_raw (type, prefix);
508
  gimple_add_tmp_var (tmp_var);
509
  return tmp_var;
510
}
511
 
512
/* Create a temporary with a name derived from VAL.  Subroutine of
513
   lookup_tmp_var; nobody else should call this function.  */
514
 
515
static inline tree
516
create_tmp_from_val (tree val)
517
{
518
  return create_tmp_var (TREE_TYPE (val), get_name (val));
519
}
520
 
521
/* Create a temporary to hold the value of VAL.  If IS_FORMAL, try to reuse
522
   an existing expression temporary.  */
523
 
524
static tree
525
lookup_tmp_var (tree val, bool is_formal)
526
{
527
  tree ret;
528
 
529
  /* If not optimizing, never really reuse a temporary.  local-alloc
530
     won't allocate any variable that is used in more than one basic
531
     block, which means it will go into memory, causing much extra
532
     work in reload and final and poorer code generation, outweighing
533
     the extra memory allocation here.  */
534
  if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
535
    ret = create_tmp_from_val (val);
536
  else
537
    {
538
      elt_t elt, *elt_p;
539
      void **slot;
540
 
541
      elt.val = val;
542
      if (gimplify_ctxp->temp_htab == NULL)
543
        gimplify_ctxp->temp_htab
544
          = htab_create (1000, gimple_tree_hash, gimple_tree_eq, free);
545
      slot = htab_find_slot (gimplify_ctxp->temp_htab, (void *)&elt, INSERT);
546
      if (*slot == NULL)
547
        {
548
          elt_p = XNEW (elt_t);
549
          elt_p->val = val;
550
          elt_p->temp = ret = create_tmp_from_val (val);
551
          *slot = (void *) elt_p;
552
        }
553
      else
554
        {
555
          elt_p = (elt_t *) *slot;
556
          ret = elt_p->temp;
557
        }
558
    }
559
 
560
  return ret;
561
}
562
 
563
 
564
/* Return true if T is a CALL_EXPR or an expression that can be
565
   assigned to a temporary.  Note that this predicate should only be
566
   used during gimplification.  See the rationale for this in
567
   gimplify_modify_expr.  */
568
 
569
static bool
570
is_gimple_reg_rhs_or_call (tree t)
571
{
572
  return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
573
          || TREE_CODE (t) == CALL_EXPR);
574
}
575
 
576
/* Return true if T is a valid memory RHS or a CALL_EXPR.  Note that
577
   this predicate should only be used during gimplification.  See the
578
   rationale for this in gimplify_modify_expr.  */
579
 
580
static bool
581
is_gimple_mem_rhs_or_call (tree t)
582
{
583
  /* If we're dealing with a renamable type, either source or dest must be
584
     a renamed variable.  */
585
  if (is_gimple_reg_type (TREE_TYPE (t)))
586
    return is_gimple_val (t);
587
  else
588
    return (is_gimple_val (t) || is_gimple_lvalue (t)
589
            || TREE_CODE (t) == CALL_EXPR);
590
}
591
 
592
/* Helper for get_formal_tmp_var and get_initialized_tmp_var.  */
593
 
594
static tree
595
internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
596
                      bool is_formal)
597
{
598
  tree t, mod;
599
 
600
  /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
601
     can create an INIT_EXPR and convert it into a GIMPLE_CALL below.  */
602
  gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
603
                 fb_rvalue);
604
 
605
  t = lookup_tmp_var (val, is_formal);
606
 
607
  if (is_formal
608
      && (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
609
          || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE))
610
    DECL_GIMPLE_REG_P (t) = 1;
611
 
612
  mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));
613
 
614
  if (EXPR_HAS_LOCATION (val))
615
    SET_EXPR_LOCATION (mod, EXPR_LOCATION (val));
616
  else
617
    SET_EXPR_LOCATION (mod, input_location);
618
 
619
  /* gimplify_modify_expr might want to reduce this further.  */
620
  gimplify_and_add (mod, pre_p);
621
  ggc_free (mod);
622
 
623
  /* If we're gimplifying into ssa, gimplify_modify_expr will have
624
     given our temporary an SSA name.  Find and return it.  */
625
  if (gimplify_ctxp->into_ssa)
626
    {
627
      gimple last = gimple_seq_last_stmt (*pre_p);
628
      t = gimple_get_lhs (last);
629
    }
630
 
631
  return t;
632
}
633
 
634
/* Returns a formal temporary variable initialized with VAL.  PRE_P is as
635
   in gimplify_expr.  Only use this function if:
636
 
637
   1) The value of the unfactored expression represented by VAL will not
638
      change between the initialization and use of the temporary, and
639
   2) The temporary will not be otherwise modified.
640
 
641
   For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
642
   and #2 means it is inappropriate for && temps.
643
 
644
   For other cases, use get_initialized_tmp_var instead.  */
645
 
646
tree
647
get_formal_tmp_var (tree val, gimple_seq *pre_p)
648
{
649
  return internal_get_tmp_var (val, pre_p, NULL, true);
650
}
651
 
652
/* Returns a temporary variable initialized with VAL.  PRE_P and POST_P
653
   are as in gimplify_expr.  */
654
 
655
tree
656
get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p)
657
{
658
  return internal_get_tmp_var (val, pre_p, post_p, false);
659
}
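
/* Callers use these helpers to force a value into a temporary.  For
   example (names illustrative only),

     tree t = get_initialized_tmp_var (size_expr, pre_p, NULL);

   appends something like "D.1234 = <size_expr>;" to *PRE_P and returns
   the new temporary, so T can then be used as a simple gimple
   operand.  */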
660
 
661
/* Declares all the variables in VARS in SCOPE.  If DEBUG_INFO is
662
   true, generate debug info for them; otherwise don't.  */
663
 
664
void
665
declare_vars (tree vars, gimple scope, bool debug_info)
666
{
667
  tree last = vars;
668
  if (last)
669
    {
670
      tree temps, block;
671
 
672
      gcc_assert (gimple_code (scope) == GIMPLE_BIND);
673
 
674
      temps = nreverse (last);
675
 
676
      block = gimple_bind_block (scope);
677
      gcc_assert (!block || TREE_CODE (block) == BLOCK);
678
      if (!block || !debug_info)
679
        {
680
          TREE_CHAIN (last) = gimple_bind_vars (scope);
681
          gimple_bind_set_vars (scope, temps);
682
        }
683
      else
684
        {
685
          /* We need to attach the nodes both to the BIND_EXPR and to its
686
             associated BLOCK for debugging purposes.  The key point here
687
             is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
688
             is a subchain of the BIND_EXPR_VARS of the BIND_EXPR.  */
689
          if (BLOCK_VARS (block))
690
            BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
691
          else
692
            {
693
              gimple_bind_set_vars (scope,
694
                                    chainon (gimple_bind_vars (scope), temps));
695
              BLOCK_VARS (block) = temps;
696
            }
697
        }
698
    }
699
}
700
 
701
/* For VAR a VAR_DECL of variable size, try to find a constant upper bound
702
   for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly.  Abort if
703
   no such upper bound can be obtained.  */
704
 
705
static void
706
force_constant_size (tree var)
707
{
708
  /* The only attempt we make is by querying the maximum size of objects
709
     of the variable's type.  */
710
 
711
  HOST_WIDE_INT max_size;
712
 
713
  gcc_assert (TREE_CODE (var) == VAR_DECL);
714
 
715
  max_size = max_int_size_in_bytes (TREE_TYPE (var));
716
 
717
  gcc_assert (max_size >= 0);
718
 
719
  DECL_SIZE_UNIT (var)
720
    = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
721
  DECL_SIZE (var)
722
    = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
723
}
724
 
725
void
726
gimple_add_tmp_var (tree tmp)
727
{
728
  gcc_assert (!TREE_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
729
 
730
  /* Later processing assumes that the object size is constant, which might
731
     not be true at this point.  Force the use of a constant upper bound in
732
     this case.  */
733
  if (!host_integerp (DECL_SIZE_UNIT (tmp), 1))
734
    force_constant_size (tmp);
735
 
736
  DECL_CONTEXT (tmp) = current_function_decl;
737
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
738
 
739
  if (gimplify_ctxp)
740
    {
741
      TREE_CHAIN (tmp) = gimplify_ctxp->temps;
742
      gimplify_ctxp->temps = tmp;
743
 
744
      /* Mark temporaries local within the nearest enclosing parallel.  */
745
      if (gimplify_omp_ctxp)
746
        {
747
          struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
748
          while (ctx && ctx->region_type == ORT_WORKSHARE)
749
            ctx = ctx->outer_context;
750
          if (ctx)
751
            omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
752
        }
753
    }
754
  else if (cfun)
755
    record_vars (tmp);
756
  else
757
    {
758
      gimple_seq body_seq;
759
 
760
      /* This case is for nested functions.  We need to expose the locals
761
         they create.  */
762
      body_seq = gimple_body (current_function_decl);
763
      declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
764
    }
765
}
766
 
767
/* Determines whether to assign a location to the statement GS.  */
768
 
769
static bool
770
should_carry_location_p (gimple gs)
771
{
772
  /* Don't emit a line note for a label.  We particularly don't want to
773
     emit one for the break label, since it doesn't actually correspond
774
     to the beginning of the loop/switch.  */
775
  if (gimple_code (gs) == GIMPLE_LABEL)
776
    return false;
777
 
778
  return true;
779
}
780
 
781
 
782
/* Return true if a location should not be emitted for this statement
783
   by annotate_one_with_location.  */
784
 
785
static inline bool
786
gimple_do_not_emit_location_p (gimple g)
787
{
788
  return gimple_plf (g, GF_PLF_1);
789
}
790
 
791
/* Mark statement G so a location will not be emitted by
792
   annotate_one_with_location.  */
793
 
794
static inline void
795
gimple_set_do_not_emit_location (gimple g)
796
{
797
  /* The PLF flags are initialized to 0 when a new tuple is created,
798
     so there is no need to initialize them anywhere.  */
799
  gimple_set_plf (g, GF_PLF_1, true);
800
}
801
 
802
/* Set the location for gimple statement GS to LOCATION.  */
803
 
804
static void
805
annotate_one_with_location (gimple gs, location_t location)
806
{
807
  if (!gimple_has_location (gs)
808
      && !gimple_do_not_emit_location_p (gs)
809
      && should_carry_location_p (gs))
810
    gimple_set_location (gs, location);
811
}
812
 
813
 
814
/* Set LOCATION for all the statements after iterator GSI in sequence
815
   SEQ.  If GSI is pointing to the end of the sequence, start with the
816
   first statement in SEQ.  */
817
 
818
static void
819
annotate_all_with_location_after (gimple_seq seq, gimple_stmt_iterator gsi,
820
                                  location_t location)
821
{
822
  if (gsi_end_p (gsi))
823
    gsi = gsi_start (seq);
824
  else
825
    gsi_next (&gsi);
826
 
827
  for (; !gsi_end_p (gsi); gsi_next (&gsi))
828
    annotate_one_with_location (gsi_stmt (gsi), location);
829
}
830
 
831
 
832
/* Set the location for all the statements in a sequence STMT_P to LOCATION.  */
833
 
834
void
835
annotate_all_with_location (gimple_seq stmt_p, location_t location)
836
{
837
  gimple_stmt_iterator i;
838
 
839
  if (gimple_seq_empty_p (stmt_p))
840
    return;
841
 
842
  for (i = gsi_start (stmt_p); !gsi_end_p (i); gsi_next (&i))
843
    {
844
      gimple gs = gsi_stmt (i);
845
      annotate_one_with_location (gs, location);
846
    }
847
}
848
 
849
 
850
/* Similar to copy_tree_r() but do not copy SAVE_EXPR or TARGET_EXPR nodes.
851
   These nodes model computations that should only be done once.  If we
852
   were to unshare something like SAVE_EXPR(i++), the gimplification
853
   process would create wrong code.  */
854
 
855
static tree
856
mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
857
{
858
  enum tree_code code = TREE_CODE (*tp);
859
  /* Don't unshare types, decls, constants and SAVE_EXPR nodes.  */
860
  if (TREE_CODE_CLASS (code) == tcc_type
861
      || TREE_CODE_CLASS (code) == tcc_declaration
862
      || TREE_CODE_CLASS (code) == tcc_constant
863
      || code == SAVE_EXPR || code == TARGET_EXPR
864
      /* We can't do anything sensible with a BLOCK used as an expression,
865
         but we also can't just die when we see it because of non-expression
866
         uses.  So just avert our eyes and cross our fingers.  Silly Java.  */
867
      || code == BLOCK)
868
    *walk_subtrees = 0;
869
  else
870
    {
871
      gcc_assert (code != BIND_EXPR);
872
      copy_tree_r (tp, walk_subtrees, data);
873
    }
874
 
875
  return NULL_TREE;
876
}
877
 
878
/* Callback for walk_tree to unshare most of the shared trees rooted at
879
   *TP.  If *TP has been visited already (i.e., TREE_VISITED (*TP) == 1),
880
   then *TP is deep copied by calling copy_tree_r.
881
 
882
   This unshares the same trees as copy_tree_r with the exception of
883
   SAVE_EXPR nodes.  These nodes model computations that should only be
884
   done once.  If we were to unshare something like SAVE_EXPR(i++), the
885
   gimplification process would create wrong code.  */
886
 
887
static tree
888
copy_if_shared_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
889
                  void *data ATTRIBUTE_UNUSED)
890
{
891
  tree t = *tp;
892
  enum tree_code code = TREE_CODE (t);
893
 
894
  /* Skip types, decls, and constants.  But we do want to look at their
895
     types and the bounds of types.  Mark them as visited so we properly
896
     unmark their subtrees on the unmark pass.  If we've already seen them,
897
     don't look down further.  */
898
  if (TREE_CODE_CLASS (code) == tcc_type
899
      || TREE_CODE_CLASS (code) == tcc_declaration
900
      || TREE_CODE_CLASS (code) == tcc_constant)
901
    {
902
      if (TREE_VISITED (t))
903
        *walk_subtrees = 0;
904
      else
905
        TREE_VISITED (t) = 1;
906
    }
907
 
908
  /* If this node has been visited already, unshare it and don't look
909
     any deeper.  */
910
  else if (TREE_VISITED (t))
911
    {
912
      walk_tree (tp, mostly_copy_tree_r, NULL, NULL);
913
      *walk_subtrees = 0;
914
    }
915
 
916
  /* Otherwise, mark the tree as visited and keep looking.  */
917
  else
918
    TREE_VISITED (t) = 1;
919
 
920
  return NULL_TREE;
921
}
922
 
923
static tree
924
unmark_visited_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
925
                  void *data ATTRIBUTE_UNUSED)
926
{
927
  if (TREE_VISITED (*tp))
928
    TREE_VISITED (*tp) = 0;
929
  else
930
    *walk_subtrees = 0;
931
 
932
  return NULL_TREE;
933
}
934
 
935
/* Unshare all the trees in BODY_P, a pointer into the body of FNDECL, and the
936
   bodies of any nested functions if we are unsharing the entire body of
937
   FNDECL.  */
938
 
939
static void
940
unshare_body (tree *body_p, tree fndecl)
941
{
942
  struct cgraph_node *cgn = cgraph_node (fndecl);
943
 
944
  walk_tree (body_p, copy_if_shared_r, NULL, NULL);
945
  if (body_p == &DECL_SAVED_TREE (fndecl))
946
    for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
947
      unshare_body (&DECL_SAVED_TREE (cgn->decl), cgn->decl);
948
}
949
 
950
/* Likewise, but mark all trees as not visited.  */
951
 
952
static void
953
unvisit_body (tree *body_p, tree fndecl)
954
{
955
  struct cgraph_node *cgn = cgraph_node (fndecl);
956
 
957
  walk_tree (body_p, unmark_visited_r, NULL, NULL);
958
  if (body_p == &DECL_SAVED_TREE (fndecl))
959
    for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
960
      unvisit_body (&DECL_SAVED_TREE (cgn->decl), cgn->decl);
961
}
962
 
963
/* Unconditionally make an unshared copy of EXPR.  This is used when using
964
   stored expressions which span multiple functions, such as BINFO_VTABLE,
965
   as the normal unsharing process can't tell that they're shared.  */
966
 
967
tree
968
unshare_expr (tree expr)
969
{
970
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
971
  return expr;
972
}
973
 
974
/* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
975
   contain statements and have a value.  Assign its value to a temporary
976
   and give it void_type_node.  Returns the temporary, or NULL_TREE if
977
   WRAPPER was already void.  */
978
 
979
tree
980
voidify_wrapper_expr (tree wrapper, tree temp)
981
{
982
  tree type = TREE_TYPE (wrapper);
983
  if (type && !VOID_TYPE_P (type))
984
    {
985
      tree *p;
986
 
987
      /* Set p to point to the body of the wrapper.  Loop until we find
988
         something that isn't a wrapper.  */
989
      for (p = &wrapper; p && *p; )
990
        {
991
          switch (TREE_CODE (*p))
992
            {
993
            case BIND_EXPR:
994
              TREE_SIDE_EFFECTS (*p) = 1;
995
              TREE_TYPE (*p) = void_type_node;
996
              /* For a BIND_EXPR, the body is operand 1.  */
997
              p = &BIND_EXPR_BODY (*p);
998
              break;
999
 
1000
            case CLEANUP_POINT_EXPR:
1001
            case TRY_FINALLY_EXPR:
1002
            case TRY_CATCH_EXPR:
1003
              TREE_SIDE_EFFECTS (*p) = 1;
1004
              TREE_TYPE (*p) = void_type_node;
1005
              p = &TREE_OPERAND (*p, 0);
1006
              break;
1007
 
1008
            case STATEMENT_LIST:
1009
              {
1010
                tree_stmt_iterator i = tsi_last (*p);
1011
                TREE_SIDE_EFFECTS (*p) = 1;
1012
                TREE_TYPE (*p) = void_type_node;
1013
                p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
1014
              }
1015
              break;
1016
 
1017
            case COMPOUND_EXPR:
1018
              /* Advance to the last statement.  Set all container types to void.  */
1019
              for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
1020
                {
1021
                  TREE_SIDE_EFFECTS (*p) = 1;
1022
                  TREE_TYPE (*p) = void_type_node;
1023
                }
1024
              break;
1025
 
1026
            default:
1027
              goto out;
1028
            }
1029
        }
1030
 
1031
    out:
1032
      if (p == NULL || IS_EMPTY_STMT (*p))
1033
        temp = NULL_TREE;
1034
      else if (temp)
1035
        {
1036
          /* The wrapper is on the RHS of an assignment that we're pushing
1037
             down.  */
1038
          gcc_assert (TREE_CODE (temp) == INIT_EXPR
1039
                      || TREE_CODE (temp) == MODIFY_EXPR);
1040
          TREE_OPERAND (temp, 1) = *p;
1041
          *p = temp;
1042
        }
1043
      else
1044
        {
1045
          temp = create_tmp_var (type, "retval");
1046
          *p = build2 (INIT_EXPR, type, temp, *p);
1047
        }
1048
 
1049
      return temp;
1050
    }
1051
 
1052
  return NULL_TREE;
1053
}
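
/* Sketch of the effect: a statement-expression used as an rvalue, say

     x = ({ int i = f (); i + 1; });

   reaches this function as a wrapper with non-void type.  The wrapper
   is given void type and, per the cases above, either the assignment
   being pushed down replaces the final value computation, roughly

     ({ int i = f (); x = i + 1; });

   or the value is captured in a fresh "retval" temporary.  */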
1054
 
1055
/* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1056
   a temporary through which they communicate.  */
1057
 
1058
static void
1059
build_stack_save_restore (gimple *save, gimple *restore)
1060
{
1061
  tree tmp_var;
1062
 
1063
  *save = gimple_build_call (implicit_built_in_decls[BUILT_IN_STACK_SAVE], 0);
1064
  tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1065
  gimple_call_set_lhs (*save, tmp_var);
1066
 
1067
  *restore = gimple_build_call (implicit_built_in_decls[BUILT_IN_STACK_RESTORE],
1068
                            1, tmp_var);
1069
}
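
/* The pair built here ends up bracketing a bind block roughly as

     saved_stack.1 = __builtin_stack_save ();
     try
       {
         ... body containing VLAs or alloca calls ...
       }
     finally
       {
         __builtin_stack_restore (saved_stack.1);
       }

   in the GIMPLE dump (a sketch; see the use in gimplify_bind_expr
   below).  */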
1070
 
1071
/* Gimplify a BIND_EXPR.  Just voidify and recurse.  */
1072
 
1073
static enum gimplify_status
1074
gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
1075
{
1076
  tree bind_expr = *expr_p;
1077
  bool old_save_stack = gimplify_ctxp->save_stack;
1078
  tree t;
1079
  gimple gimple_bind;
1080
  gimple_seq body;
1081
 
1082
  tree temp = voidify_wrapper_expr (bind_expr, NULL);
1083
 
1084
  /* Mark variables seen in this bind expr.  */
1085
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = TREE_CHAIN (t))
1086
    {
1087
      if (TREE_CODE (t) == VAR_DECL)
1088
        {
1089
          struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1090
 
1091
          /* Mark variable as local.  */
1092
          if (ctx && !is_global_var (t)
1093
              && (! DECL_SEEN_IN_BIND_EXPR_P (t)
1094
                  || splay_tree_lookup (ctx->variables,
1095
                                        (splay_tree_key) t) == NULL))
1096
            omp_add_variable (gimplify_omp_ctxp, t, GOVD_LOCAL | GOVD_SEEN);
1097
 
1098
          DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
1099
 
1100
          if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
1101
            cfun->has_local_explicit_reg_vars = true;
1102
        }
1103
 
1104
      /* Preliminarily mark non-addressed complex variables as eligible
1105
         for promotion to gimple registers.  We'll transform their uses
1106
         as we find them.
1107
         We exclude complex types if not optimizing because they can be
1108
         subject to partial stores in GNU C by means of the __real__ and
1109
         __imag__ operators and we cannot promote them to total stores
1110
         (see gimplify_modify_expr_complex_part).  */
1111
      if (optimize
1112
          && (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
1113
              || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
1114
          && !TREE_THIS_VOLATILE (t)
1115
          && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
1116
          && !needs_to_live_in_memory (t))
1117
        DECL_GIMPLE_REG_P (t) = 1;
1118
    }
1119
 
1120
  gimple_bind = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
1121
                                   BIND_EXPR_BLOCK (bind_expr));
1122
  gimple_push_bind_expr (gimple_bind);
1123
 
1124
  gimplify_ctxp->save_stack = false;
1125
 
1126
  /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
1127
  body = NULL;
1128
  gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
1129
  gimple_bind_set_body (gimple_bind, body);
1130
 
1131
  if (gimplify_ctxp->save_stack)
1132
    {
1133
      gimple stack_save, stack_restore, gs;
1134
      gimple_seq cleanup, new_body;
1135
 
1136
      /* Save stack on entry and restore it on exit.  Add a try_finally
1137
         block to achieve this.  Note that mudflap depends on the
1138
         format of the emitted code: see mx_register_decls().  */
1139
      build_stack_save_restore (&stack_save, &stack_restore);
1140
 
1141
      cleanup = new_body = NULL;
1142
      gimplify_seq_add_stmt (&cleanup, stack_restore);
1143
      gs = gimple_build_try (gimple_bind_body (gimple_bind), cleanup,
1144
                             GIMPLE_TRY_FINALLY);
1145
 
1146
      gimplify_seq_add_stmt (&new_body, stack_save);
1147
      gimplify_seq_add_stmt (&new_body, gs);
1148
      gimple_bind_set_body (gimple_bind, new_body);
1149
    }
1150
 
1151
  gimplify_ctxp->save_stack = old_save_stack;
1152
  gimple_pop_bind_expr ();
1153
 
1154
  gimplify_seq_add_stmt (pre_p, gimple_bind);
1155
 
1156
  if (temp)
1157
    {
1158
      *expr_p = temp;
1159
      return GS_OK;
1160
    }
1161
 
1162
  *expr_p = NULL_TREE;
1163
  return GS_ALL_DONE;
1164
}
1165
 
1166
/* Gimplify a RETURN_EXPR.  If the expression to be returned is not a
1167
   GIMPLE value, it is assigned to a new temporary and the statement is
1168
   re-written to return the temporary.
1169
 
1170
   PRE_P points to the sequence where side effects that must happen before
1171
   STMT should be stored.  */
1172
 
1173
static enum gimplify_status
1174
gimplify_return_expr (tree stmt, gimple_seq *pre_p)
1175
{
1176
  gimple ret;
1177
  tree ret_expr = TREE_OPERAND (stmt, 0);
1178
  tree result_decl, result;
1179
 
1180
  if (ret_expr == error_mark_node)
1181
    return GS_ERROR;
1182
 
1183
  if (!ret_expr
1184
      || TREE_CODE (ret_expr) == RESULT_DECL
1185
      || ret_expr == error_mark_node)
1186
    {
1187
      gimple ret = gimple_build_return (ret_expr);
1188
      gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1189
      gimplify_seq_add_stmt (pre_p, ret);
1190
      return GS_ALL_DONE;
1191
    }
1192
 
1193
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1194
    result_decl = NULL_TREE;
1195
  else
1196
    {
1197
      result_decl = TREE_OPERAND (ret_expr, 0);
1198
 
1199
      /* See through a return by reference.  */
1200
      if (TREE_CODE (result_decl) == INDIRECT_REF)
1201
        result_decl = TREE_OPERAND (result_decl, 0);
1202
 
1203
      gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1204
                   || TREE_CODE (ret_expr) == INIT_EXPR)
1205
                  && TREE_CODE (result_decl) == RESULT_DECL);
1206
    }
1207
 
1208
  /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1209
     Recall that aggregate_value_p is FALSE for any aggregate type that is
1210
     returned in registers.  If we're returning values in registers, then
1211
     we don't want to extend the lifetime of the RESULT_DECL, particularly
1212
     across another call.  In addition, for those aggregates for which
1213
     hard_function_value generates a PARALLEL, we'll die during normal
1214
     expansion of structure assignments; there's special code in expand_return
1215
     to handle this case that does not exist in expand_expr.  */
1216
  if (!result_decl
1217
      || aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1218
    result = result_decl;
1219
  else if (gimplify_ctxp->return_temp)
1220
    result = gimplify_ctxp->return_temp;
1221
  else
1222
    {
1223
      result = create_tmp_var (TREE_TYPE (result_decl), NULL);
1224
      if (TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
1225
          || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
1226
        DECL_GIMPLE_REG_P (result) = 1;
1227
 
1228
      /* ??? With complex control flow (usually involving abnormal edges),
1229
         we can wind up warning about an uninitialized value for this.  Due
1230
         to how this variable is constructed and initialized, this is never
1231
         true.  Give up and never warn.  */
1232
      TREE_NO_WARNING (result) = 1;
1233
 
1234
      gimplify_ctxp->return_temp = result;
1235
    }
1236
 
1237
  /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1238
     Then gimplify the whole thing.  */
1239
  if (result != result_decl)
1240
    TREE_OPERAND (ret_expr, 0) = result;
1241
 
1242
  gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1243
 
1244
  ret = gimple_build_return (result);
1245
  gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1246
  gimplify_seq_add_stmt (pre_p, ret);
1247
 
1248
  return GS_ALL_DONE;
1249
}
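
/* For example (a sketch, with illustrative temporary names), a GENERIC
   "return x + y;" whose operand is "RESULT_DECL = x + y" becomes

     D.1234 = x + y;
     return D.1234;

   when the value is returned in registers, while an aggregate returned
   in memory keeps the bare RESULT_DECL as the returned operand.  */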
1250
 
1251
static void
1252
gimplify_vla_decl (tree decl, gimple_seq *seq_p)
1253
{
1254
  /* This is a variable-sized decl.  Simplify its size and mark it
1255
     for deferred expansion.  Note that mudflap depends on the format
1256
     of the emitted code: see mx_register_decls().  */
1257
  tree t, addr, ptr_type;
1258
 
1259
  gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
1260
  gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);
1261
 
1262
  /* All occurrences of this decl in final gimplified code will be
1263
     replaced by indirection.  Setting DECL_VALUE_EXPR does two
1264
     things: First, it lets the rest of the gimplifier know what
1265
     replacement to use.  Second, it lets the debug info know
1266
     where to find the value.  */
1267
  ptr_type = build_pointer_type (TREE_TYPE (decl));
1268
  addr = create_tmp_var (ptr_type, get_name (decl));
1269
  DECL_IGNORED_P (addr) = 0;
1270
  t = build_fold_indirect_ref (addr);
1271
  SET_DECL_VALUE_EXPR (decl, t);
1272
  DECL_HAS_VALUE_EXPR_P (decl) = 1;
1273
 
1274
  t = built_in_decls[BUILT_IN_ALLOCA];
1275
  t = build_call_expr (t, 1, DECL_SIZE_UNIT (decl));
1276
  t = fold_convert (ptr_type, t);
1277
  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
1278
 
1279
  gimplify_and_add (t, seq_p);
1280
 
1281
  /* Indicate that we need to restore the stack level when the
1282
     enclosing BIND_EXPR is exited.  */
1283
  gimplify_ctxp->save_stack = true;
1284
}
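
/* Sketch of the effect: for a declaration such as "char buf[n];" the
   code above emits, roughly,

     buf.2 = __builtin_alloca (D.1);

   after gimplifying the size computation into D.1, and rewrites BUF to
   "*buf.2" via DECL_VALUE_EXPR, so later references to BUF gimplify
   into indirections through the pointer temporary.  The names are
   illustrative only.  */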
1285
 
1286
 
1287
/* Gimplifies a DECL_EXPR node *STMT_P by making any necessary allocation
1288
   and initialization explicit.  */
1289
 
1290
static enum gimplify_status
1291
gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
1292
{
1293
  tree stmt = *stmt_p;
1294
  tree decl = DECL_EXPR_DECL (stmt);
1295
 
1296
  *stmt_p = NULL_TREE;
1297
 
1298
  if (TREE_TYPE (decl) == error_mark_node)
1299
    return GS_ERROR;
1300
 
1301
  if ((TREE_CODE (decl) == TYPE_DECL
1302
       || TREE_CODE (decl) == VAR_DECL)
1303
      && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
1304
    gimplify_type_sizes (TREE_TYPE (decl), seq_p);
1305
 
1306
  if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
1307
    {
1308
      tree init = DECL_INITIAL (decl);
1309
 
1310
      if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1311
          || (!TREE_STATIC (decl)
1312
              && flag_stack_check == GENERIC_STACK_CHECK
1313
              && compare_tree_int (DECL_SIZE_UNIT (decl),
1314
                                   STACK_CHECK_MAX_VAR_SIZE) > 0))
1315
        gimplify_vla_decl (decl, seq_p);
1316
 
1317
      if (init && init != error_mark_node)
1318
        {
1319
          if (!TREE_STATIC (decl))
1320
            {
1321
              DECL_INITIAL (decl) = NULL_TREE;
1322
              init = build2 (INIT_EXPR, void_type_node, decl, init);
1323
              gimplify_and_add (init, seq_p);
1324
              ggc_free (init);
1325
            }
1326
          else
1327
            /* We must still examine initializers for static variables
1328
               as they may contain a label address.  */
1329
            walk_tree (&init, force_labels_r, NULL, NULL);
1330
        }
1331
 
1332
      /* Some front ends do not explicitly declare all anonymous
1333
         artificial variables.  We compensate here by declaring the
1334
         variables, though it would be better if the front ends would
1335
         explicitly declare them.  */
1336
      if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1337
          && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1338
        gimple_add_tmp_var (decl);
1339
    }
1340
 
1341
  return GS_ALL_DONE;
1342
}
1343
 
1344
/* Gimplify a LOOP_EXPR.  Normally this just involves gimplifying the body
1345
   and replacing the LOOP_EXPR with goto, but if the loop contains an
1346
   EXIT_EXPR, we need to append a label for it to jump to.  */
1347
 
1348
static enum gimplify_status
1349
gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1350
{
1351
  tree saved_label = gimplify_ctxp->exit_label;
1352
  tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1353
 
1354
  gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1355
 
1356
  gimplify_ctxp->exit_label = NULL_TREE;
1357
 
1358
  gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1359
 
1360
  gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1361
 
1362
  if (gimplify_ctxp->exit_label)
1363
    gimplify_seq_add_stmt (pre_p, gimple_build_label (gimplify_ctxp->exit_label));
1364
 
1365
  gimplify_ctxp->exit_label = saved_label;
1366
 
1367
  *expr_p = NULL;
1368
  return GS_ALL_DONE;
1369
}
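
/* For instance (sketch), a LOOP_EXPR whose body contains an EXIT_EXPR
   lowers to

     start:
       if (<exit condition>) goto exit;
       ... rest of the loop body ...
       goto start;
     exit:

   where the exit label is only emitted when gimplifying the body
   actually created one (see gimplify_exit_expr below).  */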
1370
 
1371
/* Gimplifies a statement list onto a sequence.  These may be created either
1372
   by an enlightened front-end, or by shortcut_cond_expr.  */
1373
 
1374
static enum gimplify_status
1375
gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1376
{
1377
  tree temp = voidify_wrapper_expr (*expr_p, NULL);
1378
 
1379
  tree_stmt_iterator i = tsi_start (*expr_p);
1380
 
1381
  while (!tsi_end_p (i))
1382
    {
1383
      gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1384
      tsi_delink (&i);
1385
    }
1386
 
1387
  if (temp)
1388
    {
1389
      *expr_p = temp;
1390
      return GS_OK;
1391
    }
1392
 
1393
  return GS_ALL_DONE;
1394
}
1395
 
1396
/* Compare two case labels.  Because the front end should already have
1397
   made sure that case ranges do not overlap, it is enough to only compare
1398
   the CASE_LOW values of each case label.  */
1399
 
1400
static int
1401
compare_case_labels (const void *p1, const void *p2)
1402
{
1403
  const_tree const case1 = *(const_tree const*)p1;
1404
  const_tree const case2 = *(const_tree const*)p2;
1405
 
1406
  /* The 'default' case label always goes first.  */
1407
  if (!CASE_LOW (case1))
1408
    return -1;
1409
  else if (!CASE_LOW (case2))
1410
    return 1;
1411
  else
1412
    return tree_int_cst_compare (CASE_LOW (case1), CASE_LOW (case2));
1413
}
1414
 
1415
 
1416
/* Sort the case labels in LABEL_VEC in place in ascending order.  */
1417
 
1418
void
1419
sort_case_labels (VEC(tree,heap)* label_vec)
1420
{
1421
  size_t len = VEC_length (tree, label_vec);
1422
  qsort (VEC_address (tree, label_vec), len, sizeof (tree),
1423
         compare_case_labels);
1424
}
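
/* After sorting, a label vector for "case 5:", "case 1 ... 3:" and
   "default:" is ordered roughly as

     default:, case 1 ... 3:, case 5:

   since a missing CASE_LOW (the default label) compares before
   everything else and the remaining labels are ordered by their
   CASE_LOW values.  */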
1425
 
1426
 
1427
/* Gimplify a SWITCH_EXPR, and collect a TREE_VEC of the labels it can
1428
   branch to.  */
1429
 
1430
static enum gimplify_status
1431
gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
1432
{
1433
  tree switch_expr = *expr_p;
1434
  gimple_seq switch_body_seq = NULL;
1435
  enum gimplify_status ret;
1436
 
1437
  ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
1438
                       fb_rvalue);
1439
  if (ret == GS_ERROR || ret == GS_UNHANDLED)
1440
    return ret;
1441
 
1442
  if (SWITCH_BODY (switch_expr))
1443
    {
1444
      VEC (tree,heap) *labels;
1445
      VEC (tree,heap) *saved_labels;
1446
      tree default_case = NULL_TREE;
1447
      size_t i, len;
1448
      gimple gimple_switch;
1449
 
1450
      /* If someone can be bothered to fill in the labels, they can
1451
         be bothered to null out the body too.  */
1452
      gcc_assert (!SWITCH_LABELS (switch_expr));
1453
 
1454
      /* Save old labels, get new ones from body, then restore the old
1455
         labels.  Save all the things from the switch body to append after.  */
1456
      saved_labels = gimplify_ctxp->case_labels;
1457
      gimplify_ctxp->case_labels = VEC_alloc (tree, heap, 8);
1458
 
1459
      gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
1460
      labels = gimplify_ctxp->case_labels;
1461
      gimplify_ctxp->case_labels = saved_labels;
1462
 
1463
      i = 0;
1464
      while (i < VEC_length (tree, labels))
1465
        {
1466
          tree elt = VEC_index (tree, labels, i);
1467
          tree low = CASE_LOW (elt);
1468
          bool remove_element = FALSE;
1469
 
1470
          if (low)
1471
            {
1472
              /* Discard empty ranges.  */
1473
              tree high = CASE_HIGH (elt);
1474
              if (high && tree_int_cst_lt (high, low))
1475
                remove_element = TRUE;
1476
            }
1477
          else
1478
            {
1479
              /* The default case must be the last label in the list.  */
1480
              gcc_assert (!default_case);
1481
              default_case = elt;
1482
              remove_element = TRUE;
1483
            }
1484
 
1485
          if (remove_element)
1486
            VEC_ordered_remove (tree, labels, i);
1487
          else
1488
            i++;
1489
        }
1490
      len = i;
1491
 
1492
      if (!VEC_empty (tree, labels))
1493
        sort_case_labels (labels);
1494
 
1495
      if (!default_case)
1496
        {
1497
          tree type = TREE_TYPE (switch_expr);
1498
 
1499
          /* If the switch has no default label, add one, so that we jump
1500
             around the switch body.  If the labels already cover the whole
1501
             range of type, add the default label pointing to one of the
1502
             existing labels.  */
1503
          if (type == void_type_node)
1504
            type = TREE_TYPE (SWITCH_COND (switch_expr));
1505
          if (len
1506
              && INTEGRAL_TYPE_P (type)
1507
              && TYPE_MIN_VALUE (type)
1508
              && TYPE_MAX_VALUE (type)
1509
              && tree_int_cst_equal (CASE_LOW (VEC_index (tree, labels, 0)),
1510
                                     TYPE_MIN_VALUE (type)))
1511
            {
1512
              tree low, high = CASE_HIGH (VEC_index (tree, labels, len - 1));
1513
              if (!high)
1514
                high = CASE_LOW (VEC_index (tree, labels, len - 1));
1515
              if (tree_int_cst_equal (high, TYPE_MAX_VALUE (type)))
1516
                {
1517
                  for (i = 1; i < len; i++)
1518
                    {
1519
                      high = CASE_LOW (VEC_index (tree, labels, i));
1520
                      low = CASE_HIGH (VEC_index (tree, labels, i - 1));
1521
                      if (!low)
1522
                        low = CASE_LOW (VEC_index (tree, labels, i - 1));
1523
                      if ((TREE_INT_CST_LOW (low) + 1
1524
                           != TREE_INT_CST_LOW (high))
1525
                          || (TREE_INT_CST_HIGH (low)
1526
                              + (TREE_INT_CST_LOW (high) == 0)
1527
                              != TREE_INT_CST_HIGH (high)))
1528
                        break;
1529
                    }
1530
                  if (i == len)
1531
                    default_case = build3 (CASE_LABEL_EXPR, void_type_node,
1532
                                           NULL_TREE, NULL_TREE,
1533
                                           CASE_LABEL (VEC_index (tree,
1534
                                                                  labels, 0)));
1535
                }
1536
            }
1537
 
1538
          if (!default_case)
1539
            {
1540
              gimple new_default;
1541
 
1542
              default_case
1543
                = build3 (CASE_LABEL_EXPR, void_type_node,
1544
                          NULL_TREE, NULL_TREE,
1545
                          create_artificial_label (UNKNOWN_LOCATION));
1546
              new_default = gimple_build_label (CASE_LABEL (default_case));
1547
              gimplify_seq_add_stmt (&switch_body_seq, new_default);
1548
            }
1549
        }
1550
 
1551
      gimple_switch = gimple_build_switch_vec (SWITCH_COND (switch_expr),
1552
                                               default_case, labels);
1553
      gimplify_seq_add_stmt (pre_p, gimple_switch);
1554
      gimplify_seq_add_seq (pre_p, switch_body_seq);
1555
      VEC_free(tree, heap, labels);
1556
    }
1557
  else
1558
    gcc_assert (SWITCH_LABELS (switch_expr));
1559
 
1560
  return GS_ALL_DONE;
1561
}
1562
 
1563
 
1564
static enum gimplify_status
1565
gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
1566
{
1567
  struct gimplify_ctx *ctxp;
1568
  gimple gimple_label;
1569
 
1570
  /* Invalid OpenMP programs can play Duff's Device type games with
1571
     #pragma omp parallel.  At least in the C front end, we don't
1572
     detect such invalid branches until after gimplification.  */
1573
  for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
1574
    if (ctxp->case_labels)
1575
      break;
1576
 
1577
  gimple_label = gimple_build_label (CASE_LABEL (*expr_p));
1578
  VEC_safe_push (tree, heap, ctxp->case_labels, *expr_p);
1579
  gimplify_seq_add_stmt (pre_p, gimple_label);
1580
 
1581
  return GS_ALL_DONE;
1582
}
1583
 
1584
/* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
1585
   if necessary.  */
1586
 
1587
tree
1588
build_and_jump (tree *label_p)
1589
{
1590
  if (label_p == NULL)
1591
    /* If there's nowhere to jump, just fall through.  */
1592
    return NULL_TREE;
1593
 
1594
  if (*label_p == NULL_TREE)
1595
    {
1596
      tree label = create_artificial_label (UNKNOWN_LOCATION);
1597
      *label_p = label;
1598
    }
1599
 
1600
  return build1 (GOTO_EXPR, void_type_node, *label_p);
1601
}
1602
 
1603
/* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
1604
   This also involves building a label to jump to and communicating it to
1605
   gimplify_loop_expr through gimplify_ctxp->exit_label.  */
1606
 
1607
static enum gimplify_status
1608
gimplify_exit_expr (tree *expr_p)
1609
{
1610
  tree cond = TREE_OPERAND (*expr_p, 0);
1611
  tree expr;
1612
 
1613
  expr = build_and_jump (&gimplify_ctxp->exit_label);
1614
  expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
1615
  *expr_p = expr;
1616
 
1617
  return GS_OK;
1618
}
1619
 
1620
/* A helper function to be called via walk_tree.  Mark all labels under *TP
1621
   as being forced.  To be called for DECL_INITIAL of static variables.  */
1622
 
1623
tree
1624
force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1625
{
1626
  if (TYPE_P (*tp))
1627
    *walk_subtrees = 0;
1628
  if (TREE_CODE (*tp) == LABEL_DECL)
1629
    FORCED_LABEL (*tp) = 1;
1630
 
1631
  return NULL_TREE;
1632
}
1633
 
1634
/* *EXPR_P is a COMPONENT_REF being used as an rvalue.  If its type is
1635
   different from its canonical type, wrap the whole thing inside a
1636
   NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
1637
   type.
1638
 
1639
   The canonical type of a COMPONENT_REF is the type of the field being
1640
   referenced--unless the field is a bit-field which can be read directly
1641
   in a smaller mode, in which case the canonical type is the
1642
   sign-appropriate type corresponding to that mode.  */
1643
 
1644
static void
1645
canonicalize_component_ref (tree *expr_p)
1646
{
1647
  tree expr = *expr_p;
1648
  tree type;
1649
 
1650
  gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
1651
 
1652
  if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
1653
    type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
1654
  else
1655
    type = TREE_TYPE (TREE_OPERAND (expr, 1));
1656
 
1657
  /* One could argue that all the stuff below is unnecessary for the
1658
     non-bitfield case, and that needing a type adjustment there should
1659
     instead be treated as a front-end error.  */
1660
  if (TREE_TYPE (expr) != type)
1661
    {
1662
#ifdef ENABLE_TYPES_CHECKING
1663
      tree old_type = TREE_TYPE (expr);
1664
#endif
1665
      int type_quals;
1666
 
1667
      /* We need to preserve qualifiers and propagate them from
1668
         operand 0.  */
1669
      type_quals = TYPE_QUALS (type)
1670
        | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
1671
      if (TYPE_QUALS (type) != type_quals)
1672
        type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
1673
 
1674
      /* Set the type of the COMPONENT_REF to the underlying type.  */
1675
      TREE_TYPE (expr) = type;
1676
 
1677
#ifdef ENABLE_TYPES_CHECKING
1678
      /* It is now a FE error if the conversion from the canonical
1679
         type to the original expression type is not useless.  */
1680
      gcc_assert (useless_type_conversion_p (old_type, type));
1681
#endif
1682
    }
1683
}
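/* Illustrative sketch: for a bit-field access such as

       struct S { unsigned b : 3; } s;
       ... (unsigned char) s.b ...

   the canonical type of 's.b' may be the sign-appropriate type of the
   smaller mode the field can actually be read in, so forcing the
   COMPONENT_REF to that type here can make the enclosing NOP
   conversion useless.  */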
1684
 
1685
/* If a NOP conversion is changing a pointer to array of foo to a pointer
1686
   to foo, embed that change in the ADDR_EXPR by converting
1687
      T array[U];
1688
      (T *)&array
1689
   ==>
1690
      &array[L]
1691
   where L is the lower bound.  For simplicity, only do this for constant
1692
   lower bound.
1693
   The constraint is that the type of &array[L] is trivially convertible
1694
   to T *.  */
1695
 
1696
static void
1697
canonicalize_addr_expr (tree *expr_p)
1698
{
1699
  tree expr = *expr_p;
1700
  tree addr_expr = TREE_OPERAND (expr, 0);
1701
  tree datype, ddatype, pddatype;
1702
 
1703
  /* We simplify only conversions from an ADDR_EXPR to a pointer type.  */
1704
  if (!POINTER_TYPE_P (TREE_TYPE (expr))
1705
      || TREE_CODE (addr_expr) != ADDR_EXPR)
1706
    return;
1707
 
1708
  /* The addr_expr type should be a pointer to an array.  */
1709
  datype = TREE_TYPE (TREE_TYPE (addr_expr));
1710
  if (TREE_CODE (datype) != ARRAY_TYPE)
1711
    return;
1712
 
1713
  /* The pointer to element type shall be trivially convertible to
1714
     the expression pointer type.  */
1715
  ddatype = TREE_TYPE (datype);
1716
  pddatype = build_pointer_type (ddatype);
1717
  if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
1718
                                  pddatype))
1719
    return;
1720
 
1721
  /* The lower bound and element sizes must be constant.  */
1722
  if (!TYPE_SIZE_UNIT (ddatype)
1723
      || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
1724
      || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
1725
      || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
1726
    return;
1727
 
1728
  /* All checks succeeded.  Build a new node to merge the cast.  */
1729
  *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
1730
                    TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
1731
                    NULL_TREE, NULL_TREE);
1732
  *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
1733
 
1734
  /* We can have stripped a required restrict qualifier above.  */
1735
  if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
1736
    *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
1737
}
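/* Illustrative sketch:

       int a[10];
       int *p = (int *) &a;

   The conversion of '&a' from 'int (*)[10]' to 'int *' is merged into
   the address expression, yielding

       int *p = &a[0];

   so no separate conversion statement is needed.  */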
1738
 
1739
/* *EXPR_P is a NOP_EXPR or CONVERT_EXPR.  Remove it and/or other conversions
1740
   underneath as appropriate.  */
1741
 
1742
static enum gimplify_status
1743
gimplify_conversion (tree *expr_p)
1744
{
1745
  tree tem;
1746
  location_t loc = EXPR_LOCATION (*expr_p);
1747
  gcc_assert (CONVERT_EXPR_P (*expr_p));
1748
 
1749
  /* Then strip away all but the outermost conversion.  */
1750
  STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
1751
 
1752
  /* And remove the outermost conversion if it's useless.  */
1753
  if (tree_ssa_useless_type_conversion (*expr_p))
1754
    *expr_p = TREE_OPERAND (*expr_p, 0);
1755
 
1756
  /* Attempt to avoid NOP_EXPR by producing reference to a subtype.
1757
     For example, this folds (subclass *)&A into &A->subclass, avoiding
1758
     the need for a separate statement.  */
1759
  if (CONVERT_EXPR_P (*expr_p)
1760
      && POINTER_TYPE_P (TREE_TYPE (*expr_p))
1761
      && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (*expr_p, 0)))
1762
      && (tem = maybe_fold_offset_to_address
1763
          (EXPR_LOCATION (*expr_p), TREE_OPERAND (*expr_p, 0),
1764
           integer_zero_node, TREE_TYPE (*expr_p))) != NULL_TREE)
1765
    *expr_p = tem;
1766
 
1767
  /* If we still have a conversion at the toplevel,
1768
     then canonicalize some constructs.  */
1769
  if (CONVERT_EXPR_P (*expr_p))
1770
    {
1771
      tree sub = TREE_OPERAND (*expr_p, 0);
1772
 
1773
      /* If a NOP conversion is changing the type of a COMPONENT_REF
1774
         expression, then canonicalize its type now in order to expose more
1775
         redundant conversions.  */
1776
      if (TREE_CODE (sub) == COMPONENT_REF)
1777
        canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
1778
 
1779
      /* If a NOP conversion is changing a pointer to array of foo
1780
         to a pointer to foo, embed that change in the ADDR_EXPR.  */
1781
      else if (TREE_CODE (sub) == ADDR_EXPR)
1782
        canonicalize_addr_expr (expr_p);
1783
    }
1784
 
1785
  /* If we have a conversion to a non-register type force the
1786
     use of a VIEW_CONVERT_EXPR instead.  */
1787
  if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
1788
    *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
1789
                               TREE_OPERAND (*expr_p, 0));
1790
 
1791
  return GS_OK;
1792
}
1793
 
1794
/* Nonlocal VLAs seen in the current function.  */
1795
static struct pointer_set_t *nonlocal_vlas;
1796
 
1797
/* Gimplify a VAR_DECL or PARM_DECL.  Returns GS_OK if we expanded a
1798
   DECL_VALUE_EXPR, and it's worth re-examining things.  */
1799
 
1800
static enum gimplify_status
1801
gimplify_var_or_parm_decl (tree *expr_p)
1802
{
1803
  tree decl = *expr_p;
1804
 
1805
  /* ??? If this is a local variable, and it has not been seen in any
1806
     outer BIND_EXPR, then it's probably the result of a duplicate
1807
     declaration, for which we've already issued an error.  It would
1808
     be really nice if the front end wouldn't leak these at all.
1809
     Currently the only known culprit is C++ destructors, as seen
1810
     in g++.old-deja/g++.jason/binding.C.  */
1811
  if (TREE_CODE (decl) == VAR_DECL
1812
      && !DECL_SEEN_IN_BIND_EXPR_P (decl)
1813
      && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
1814
      && decl_function_context (decl) == current_function_decl)
1815
    {
1816
      gcc_assert (errorcount || sorrycount);
1817
      return GS_ERROR;
1818
    }
1819
 
1820
  /* When within an OpenMP context, notice uses of variables.  */
1821
  if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
1822
    return GS_ALL_DONE;
1823
 
1824
  /* If the decl is an alias for another expression, substitute it now.  */
1825
  if (DECL_HAS_VALUE_EXPR_P (decl))
1826
    {
1827
      tree value_expr = DECL_VALUE_EXPR (decl);
1828
 
1829
      /* For referenced nonlocal VLAs add a decl for debugging purposes
1830
         to the current function.  */
1831
      if (TREE_CODE (decl) == VAR_DECL
1832
          && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1833
          && nonlocal_vlas != NULL
1834
          && TREE_CODE (value_expr) == INDIRECT_REF
1835
          && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
1836
          && decl_function_context (decl) != current_function_decl)
1837
        {
1838
          struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1839
          while (ctx && ctx->region_type == ORT_WORKSHARE)
1840
            ctx = ctx->outer_context;
1841
          if (!ctx && !pointer_set_insert (nonlocal_vlas, decl))
1842
            {
1843
              tree copy = copy_node (decl), block;
1844
 
1845
              lang_hooks.dup_lang_specific_decl (copy);
1846
              SET_DECL_RTL (copy, NULL_RTX);
1847
              TREE_USED (copy) = 1;
1848
              block = DECL_INITIAL (current_function_decl);
1849
              TREE_CHAIN (copy) = BLOCK_VARS (block);
1850
              BLOCK_VARS (block) = copy;
1851
              SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
1852
              DECL_HAS_VALUE_EXPR_P (copy) = 1;
1853
            }
1854
        }
1855
 
1856
      *expr_p = unshare_expr (value_expr);
1857
      return GS_OK;
1858
    }
1859
 
1860
  return GS_ALL_DONE;
1861
}
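/* Illustrative sketch, assuming a C99 variable-length array (the name
   'vla.1' is hypothetical):

       void f (int n) { int vla[n]; ... vla[3] ... }

   The gimplifier gives 'vla' a DECL_VALUE_EXPR of roughly '*vla.1' for
   an artificial pointer variable, and every later use of 'vla' is
   replaced here by an unshared copy of that expression.  */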
1862
 
1863
 
1864
/* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
1865
   node *EXPR_P.
1866
 
1867
      compound_lval
1868
              : min_lval '[' val ']'
1869
              | min_lval '.' ID
1870
              | compound_lval '[' val ']'
1871
              | compound_lval '.' ID
1872
 
1873
   This is not part of the original SIMPLE definition, which separates
1874
   array and member references, but it seems reasonable to handle them
1875
   together.  Also, this way we don't run into problems with union
1876
   aliasing; gcc requires that for accesses through a union to alias, the
1877
   union reference must be explicit, which was not always the case when we
1878
   were splitting up array and member refs.
1879
 
1880
   PRE_P points to the sequence where side effects that must happen before
1881
     *EXPR_P should be stored.
1882
 
1883
   POST_P points to the sequence where side effects that must happen after
1884
     *EXPR_P should be stored.  */
1885
 
1886
static enum gimplify_status
1887
gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
1888
                        fallback_t fallback)
1889
{
1890
  tree *p;
1891
  VEC(tree,heap) *stack;
1892
  enum gimplify_status ret = GS_OK, tret;
1893
  int i;
1894
  location_t loc = EXPR_LOCATION (*expr_p);
1895
 
1896
  /* Create a stack of the subexpressions so later we can walk them in
1897
     order from inner to outer.  */
1898
  stack = VEC_alloc (tree, heap, 10);
1899
 
1900
  /* We can handle anything that get_inner_reference can deal with.  */
1901
  for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
1902
    {
1903
    restart:
1904
      /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs.  */
1905
      if (TREE_CODE (*p) == INDIRECT_REF)
1906
        *p = fold_indirect_ref_loc (loc, *p);
1907
 
1908
      if (handled_component_p (*p))
1909
        ;
1910
      /* Expand DECL_VALUE_EXPR now.  In some cases that may expose
1911
         additional COMPONENT_REFs.  */
1912
      else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL)
1913
               && gimplify_var_or_parm_decl (p) == GS_OK)
1914
        goto restart;
1915
      else
1916
        break;
1917
 
1918
      VEC_safe_push (tree, heap, stack, *p);
1919
    }
1920
 
1921
  gcc_assert (VEC_length (tree, stack));
1922
 
1923
  /* Now STACK is a stack of pointers to all the refs we've walked through
1924
     and P points to the innermost expression.
1925
 
1926
     Java requires that we elaborate nodes in source order.  That
1927
     means we must gimplify the inner expression followed by each of
1928
     the indices, in order.  But we can't gimplify the inner
1929
     expression until we deal with any variable bounds, sizes, or
1930
     positions in order to deal with PLACEHOLDER_EXPRs.
1931
 
1932
     So we do this in three steps.  First we deal with the annotations
1933
     for any variables in the components, then we gimplify the base,
1934
     then we gimplify any indices, from left to right.  */
1935
  for (i = VEC_length (tree, stack) - 1; i >= 0; i--)
1936
    {
1937
      tree t = VEC_index (tree, stack, i);
1938
 
1939
      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
1940
        {
1941
          /* Gimplify the low bound and element type size and put them into
1942
             the ARRAY_REF.  If these values are set, they have already been
1943
             gimplified.  */
1944
          if (TREE_OPERAND (t, 2) == NULL_TREE)
1945
            {
1946
              tree low = unshare_expr (array_ref_low_bound (t));
1947
              if (!is_gimple_min_invariant (low))
1948
                {
1949
                  TREE_OPERAND (t, 2) = low;
1950
                  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
1951
                                        post_p, is_gimple_reg,
1952
                                        fb_rvalue);
1953
                  ret = MIN (ret, tret);
1954
                }
1955
            }
1956
 
1957
          if (!TREE_OPERAND (t, 3))
1958
            {
1959
              tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
1960
              tree elmt_size = unshare_expr (array_ref_element_size (t));
1961
              tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
1962
 
1963
              /* Divide the element size by the alignment of the element
1964
                 type (above).  */
1965
              elmt_size = size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);
1966
 
1967
              if (!is_gimple_min_invariant (elmt_size))
1968
                {
1969
                  TREE_OPERAND (t, 3) = elmt_size;
1970
                  tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
1971
                                        post_p, is_gimple_reg,
1972
                                        fb_rvalue);
1973
                  ret = MIN (ret, tret);
1974
                }
1975
            }
1976
        }
1977
      else if (TREE_CODE (t) == COMPONENT_REF)
1978
        {
1979
          /* Set the field offset into T and gimplify it.  */
1980
          if (!TREE_OPERAND (t, 2))
1981
            {
1982
              tree offset = unshare_expr (component_ref_field_offset (t));
1983
              tree field = TREE_OPERAND (t, 1);
1984
              tree factor
1985
                = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
1986
 
1987
              /* Divide the offset by its alignment.  */
1988
              offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);
1989
 
1990
              if (!is_gimple_min_invariant (offset))
1991
                {
1992
                  TREE_OPERAND (t, 2) = offset;
1993
                  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
1994
                                        post_p, is_gimple_reg,
1995
                                        fb_rvalue);
1996
                  ret = MIN (ret, tret);
1997
                }
1998
            }
1999
        }
2000
    }
2001
 
2002
  /* Step 2 is to gimplify the base expression.  Make sure lvalue is set
2003
     so as to match the min_lval predicate.  Failure to do so may result
2004
     in the creation of large aggregate temporaries.  */
2005
  tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
2006
                        fallback | fb_lvalue);
2007
  ret = MIN (ret, tret);
2008
 
2009
  /* And finally, the indices and operands to BIT_FIELD_REF.  During this
2010
     loop we also remove any useless conversions.  */
2011
  for (; VEC_length (tree, stack) > 0; )
2012
    {
2013
      tree t = VEC_pop (tree, stack);
2014
 
2015
      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2016
        {
2017
          /* Gimplify the dimension.  */
2018
          if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
2019
            {
2020
              tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
2021
                                    is_gimple_val, fb_rvalue);
2022
              ret = MIN (ret, tret);
2023
            }
2024
        }
2025
      else if (TREE_CODE (t) == BIT_FIELD_REF)
2026
        {
2027
          tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
2028
                                is_gimple_val, fb_rvalue);
2029
          ret = MIN (ret, tret);
2030
          tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2031
                                is_gimple_val, fb_rvalue);
2032
          ret = MIN (ret, tret);
2033
        }
2034
 
2035
      STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
2036
 
2037
      /* The innermost expression P may have originally had
2038
         TREE_SIDE_EFFECTS set which would have caused all the outer
2039
         expressions in *EXPR_P leading to P to also have had
2040
         TREE_SIDE_EFFECTS set.  */
2041
      recalculate_side_effects (t);
2042
    }
2043
 
2044
  /* If the outermost expression is a COMPONENT_REF, canonicalize its type.  */
2045
  if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
2046
    {
2047
      canonicalize_component_ref (expr_p);
2048
      ret = MIN (ret, GS_OK);
2049
    }
2050
 
2051
  VEC_free (tree, heap, stack);
2052
 
2053
  return ret;
2054
}
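/* Illustrative sketch ('D.1' is a hypothetical temporary name): for a
   compound lvalue with a non-constant index used as an rvalue,

       x = a[i + 1].f;

   step 3 above gimplifies the index, giving roughly

       D.1 = i + 1;
       x = a[D.1].f;

   while invariant low bounds, element sizes and field offsets stay
   implicit in the ARRAY_REF and COMPONENT_REF operands.  */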
2055
 
2056
/*  Gimplify the self-modifying expression pointed to by EXPR_P
2057
    (++, --, +=, -=).
2058
 
2059
    PRE_P points to the list where side effects that must happen before
2060
        *EXPR_P should be stored.
2061
 
2062
    POST_P points to the list where side effects that must happen after
2063
        *EXPR_P should be stored.
2064
 
2065
    WANT_VALUE is nonzero iff we want to use the value of this expression
2066
        in another expression.  */
2067
 
2068
static enum gimplify_status
2069
gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2070
                        bool want_value)
2071
{
2072
  enum tree_code code;
2073
  tree lhs, lvalue, rhs, t1;
2074
  gimple_seq post = NULL, *orig_post_p = post_p;
2075
  bool postfix;
2076
  enum tree_code arith_code;
2077
  enum gimplify_status ret;
2078
  location_t loc = EXPR_LOCATION (*expr_p);
2079
 
2080
  code = TREE_CODE (*expr_p);
2081
 
2082
  gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
2083
              || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
2084
 
2085
  /* Prefix or postfix?  */
2086
  if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
2087
    /* Faster to treat as prefix if result is not used.  */
2088
    postfix = want_value;
2089
  else
2090
    postfix = false;
2091
 
2092
  /* For postfix, make sure the inner expression's post side effects
2093
     are executed after side effects from this expression.  */
2094
  if (postfix)
2095
    post_p = &post;
2096
 
2097
  /* Add or subtract?  */
2098
  if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
2099
    arith_code = PLUS_EXPR;
2100
  else
2101
    arith_code = MINUS_EXPR;
2102
 
2103
  /* Gimplify the LHS into a GIMPLE lvalue.  */
2104
  lvalue = TREE_OPERAND (*expr_p, 0);
2105
  ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
2106
  if (ret == GS_ERROR)
2107
    return ret;
2108
 
2109
  /* Extract the operands to the arithmetic operation.  */
2110
  lhs = lvalue;
2111
  rhs = TREE_OPERAND (*expr_p, 1);
2112
 
2113
  /* For a postfix operator, we evaluate the LHS to an rvalue and then use
2114
     that as the result value and in the postqueue operation.  We also
2115
     make sure to make lvalue a minimal lval, see
2116
     gcc.c-torture/execute/20040313-1.c for an example where this matters.  */
2117
  if (postfix)
2118
    {
2119
      if (!is_gimple_min_lval (lvalue))
2120
        {
2121
          mark_addressable (lvalue);
2122
          lvalue = build_fold_addr_expr_loc (input_location, lvalue);
2123
          gimplify_expr (&lvalue, pre_p, post_p, is_gimple_val, fb_rvalue);
2124
          lvalue = build_fold_indirect_ref_loc (input_location, lvalue);
2125
        }
2126
      ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
2127
      if (ret == GS_ERROR)
2128
        return ret;
2129
    }
2130
 
2131
  /* For pointer increment and decrement, use POINTER_PLUS_EXPR.  */
2132
  if (POINTER_TYPE_P (TREE_TYPE (lhs)))
2133
    {
2134
      rhs = fold_convert_loc (loc, sizetype, rhs);
2135
      if (arith_code == MINUS_EXPR)
2136
        rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
2137
      arith_code = POINTER_PLUS_EXPR;
2138
    }
2139
 
2140
  t1 = build2 (arith_code, TREE_TYPE (*expr_p), lhs, rhs);
2141
 
2142
  if (postfix)
2143
    {
2144
      gimplify_assign (lvalue, t1, orig_post_p);
2145
      gimplify_seq_add_seq (orig_post_p, post);
2146
      *expr_p = lhs;
2147
      return GS_ALL_DONE;
2148
    }
2149
  else
2150
    {
2151
      *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
2152
      return GS_OK;
2153
    }
2154
}
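/* Illustrative sketch, assuming a simple scalar 'x':

       y = x++;   gimplifies roughly to   y = x;
                                          x = x + 1;

       y = ++x;   gimplifies roughly to   x = x + 1;
                                          y = x;

   For pointers the addition is instead a POINTER_PLUS_EXPR with the
   (possibly negated) offset converted to sizetype, as above.  */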
2155
 
2156
 
2157
/* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR.  */
2158
 
2159
static void
2160
maybe_with_size_expr (tree *expr_p)
2161
{
2162
  tree expr = *expr_p;
2163
  tree type = TREE_TYPE (expr);
2164
  tree size;
2165
 
2166
  /* If we've already wrapped this or the type is error_mark_node, we can't do
2167
     anything.  */
2168
  if (TREE_CODE (expr) == WITH_SIZE_EXPR
2169
      || type == error_mark_node)
2170
    return;
2171
 
2172
  /* If the size isn't known or is a constant, we have nothing to do.  */
2173
  size = TYPE_SIZE_UNIT (type);
2174
  if (!size || TREE_CODE (size) == INTEGER_CST)
2175
    return;
2176
 
2177
  /* Otherwise, make a WITH_SIZE_EXPR.  */
2178
  size = unshare_expr (size);
2179
  size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
2180
  *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
2181
}
2182
 
2183
 
2184
/* Helper for gimplify_call_expr.  Gimplify a single argument *ARG_P.
2185
   Store any side-effects in PRE_P.  CALL_LOCATION is the location of
2186
   the CALL_EXPR.  */
2187
 
2188
static enum gimplify_status
2189
gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location)
2190
{
2191
  bool (*test) (tree);
2192
  fallback_t fb;
2193
 
2194
  /* In general, we allow lvalues for function arguments to avoid
2195
     extra overhead of copying large aggregates out of even larger
2196
     aggregates into temporaries only to copy the temporaries to
2197
     the argument list.  Make optimizers happy by pulling out to
2198
     temporaries those types that fit in registers.  */
2199
  if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
2200
    test = is_gimple_val, fb = fb_rvalue;
2201
  else
2202
    test = is_gimple_lvalue, fb = fb_either;
2203
 
2204
  /* If this is a variable sized type, we must remember the size.  */
2205
  maybe_with_size_expr (arg_p);
2206
 
2207
  /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c.  */
2208
  /* Make sure arguments have the same location as the function call
2209
     itself.  */
2210
  protected_set_expr_location (*arg_p, call_location);
2211
 
2212
  /* There is a sequence point before a function call.  Side effects in
2213
     the argument list must occur before the actual call. So, when
2214
     gimplifying arguments, force gimplify_expr to use an internal
2215
     post queue which is then appended to the end of PRE_P.  */
2216
  return gimplify_expr (arg_p, pre_p, NULL, test, fb);
2217
}
2218
 
2219
 
2220
/* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
2221
   WANT_VALUE is true if the result of the call is desired.  */
2222
 
2223
static enum gimplify_status
2224
gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
2225
{
2226
  tree fndecl, parms, p;
2227
  enum gimplify_status ret;
2228
  int i, nargs;
2229
  gimple call;
2230
  bool builtin_va_start_p = FALSE;
2231
  location_t loc = EXPR_LOCATION (*expr_p);
2232
 
2233
  gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
2234
 
2235
  /* For reliable diagnostics during inlining, it is necessary that
2236
     every call_expr be annotated with file and line.  */
2237
  if (! EXPR_HAS_LOCATION (*expr_p))
2238
    SET_EXPR_LOCATION (*expr_p, input_location);
2239
 
2240
  /* This may be a call to a builtin function.
2241
 
2242
     Builtin function calls may be transformed into different
2243
     (and more efficient) builtin function calls under certain
2244
     circumstances.  Unfortunately, gimplification can muck things
2245
     up enough that the builtin expanders are not aware that certain
2246
     transformations are still valid.
2247
 
2248
     So we attempt transformation/gimplification of the call before
2249
     we gimplify the CALL_EXPR.  At this time we do not manage to
2250
     transform all calls in the same manner as the expanders do, but
2251
     we do transform most of them.  */
2252
  fndecl = get_callee_fndecl (*expr_p);
2253
  if (fndecl && DECL_BUILT_IN (fndecl))
2254
    {
2255
      tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
2256
 
2257
      if (new_tree && new_tree != *expr_p)
2258
        {
2259
          /* There was a transformation of this call which computes the
2260
             same value, but in a more efficient way.  Return and try
2261
             again.  */
2262
          *expr_p = new_tree;
2263
          return GS_OK;
2264
        }
2265
 
2266
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2267
          && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_VA_START)
2268
        {
2269
          builtin_va_start_p = TRUE;
2270
          if (call_expr_nargs (*expr_p) < 2)
2271
            {
2272
              error ("too few arguments to function %<va_start%>");
2273
              *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2274
              return GS_OK;
2275
            }
2276
 
2277
          if (fold_builtin_next_arg (*expr_p, true))
2278
            {
2279
              *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2280
              return GS_OK;
2281
            }
2282
        }
2283
    }
2284
 
2285
  /* There is a sequence point before the call, so any side effects in
2286
     the calling expression must occur before the actual call.  Force
2287
     gimplify_expr to use an internal post queue.  */
2288
  ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
2289
                       is_gimple_call_addr, fb_rvalue);
2290
 
2291
  nargs = call_expr_nargs (*expr_p);
2292
 
2293
  /* Get argument types for verification.  */
2294
  fndecl = get_callee_fndecl (*expr_p);
2295
  parms = NULL_TREE;
2296
  if (fndecl)
2297
    parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
2298
  else if (POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_FN (*expr_p))))
2299
    parms = TYPE_ARG_TYPES (TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (*expr_p))));
2300
 
2301
  if (fndecl && DECL_ARGUMENTS (fndecl))
2302
    p = DECL_ARGUMENTS (fndecl);
2303
  else if (parms)
2304
    p = parms;
2305
  else
2306
    p = NULL_TREE;
2307
  for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
2308
    ;
2309
 
2310
  /* If the last argument is __builtin_va_arg_pack () and it is not
2311
     passed as a named argument, decrease the number of CALL_EXPR
2312
     arguments and set instead the CALL_EXPR_VA_ARG_PACK flag.  */
2313
  if (!p
2314
      && i < nargs
2315
      && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
2316
    {
2317
      tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
2318
      tree last_arg_fndecl = get_callee_fndecl (last_arg);
2319
 
2320
      if (last_arg_fndecl
2321
          && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
2322
          && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
2323
          && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
2324
        {
2325
          tree call = *expr_p;
2326
 
2327
          --nargs;
2328
          *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
2329
                                          CALL_EXPR_FN (call),
2330
                                          nargs, CALL_EXPR_ARGP (call));
2331
 
2332
          /* Copy all CALL_EXPR flags, location and block, except
2333
             CALL_EXPR_VA_ARG_PACK flag.  */
2334
          CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
2335
          CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
2336
          CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
2337
            = CALL_EXPR_RETURN_SLOT_OPT (call);
2338
          CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
2339
          CALL_CANNOT_INLINE_P (*expr_p) = CALL_CANNOT_INLINE_P (call);
2340
          SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
2341
          TREE_BLOCK (*expr_p) = TREE_BLOCK (call);
2342
 
2343
          /* Set CALL_EXPR_VA_ARG_PACK.  */
2344
          CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
2345
        }
2346
    }
2347
 
2348
  /* Finally, gimplify the function arguments.  */
2349
  if (nargs > 0)
2350
    {
2351
      for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
2352
           PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
2353
           PUSH_ARGS_REVERSED ? i-- : i++)
2354
        {
2355
          enum gimplify_status t;
2356
 
2357
          /* Avoid gimplifying the second argument to va_start, which needs to
2358
             be the plain PARM_DECL.  */
2359
          if ((i != 1) || !builtin_va_start_p)
2360
            {
2361
              t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
2362
                                EXPR_LOCATION (*expr_p));
2363
 
2364
              if (t == GS_ERROR)
2365
                ret = GS_ERROR;
2366
            }
2367
        }
2368
    }
2369
 
2370
  /* Verify the function result.  */
2371
  if (want_value && fndecl
2372
      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl))))
2373
    {
2374
      error_at (loc, "using result of function returning %<void%>");
2375
      ret = GS_ERROR;
2376
    }
2377
 
2378
  /* Try this again in case gimplification exposed something.  */
2379
  if (ret != GS_ERROR)
2380
    {
2381
      tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
2382
 
2383
      if (new_tree && new_tree != *expr_p)
2384
        {
2385
          /* There was a transformation of this call which computes the
2386
             same value, but in a more efficient way.  Return and try
2387
             again.  */
2388
          *expr_p = new_tree;
2389
          return GS_OK;
2390
        }
2391
    }
2392
  else
2393
    {
2394
      *expr_p = error_mark_node;
2395
      return GS_ERROR;
2396
    }
2397
 
2398
  /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
2399
     decl.  This allows us to eliminate redundant or useless
2400
     calls to "const" functions.  */
2401
  if (TREE_CODE (*expr_p) == CALL_EXPR)
2402
    {
2403
      int flags = call_expr_flags (*expr_p);
2404
      if (flags & (ECF_CONST | ECF_PURE)
2405
          /* An infinite loop is considered a side effect.  */
2406
          && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
2407
        TREE_SIDE_EFFECTS (*expr_p) = 0;
2408
    }
2409
 
2410
  /* If the value is not needed by the caller, emit a new GIMPLE_CALL
2411
     and clear *EXPR_P.  Otherwise, leave *EXPR_P in its gimplified
2412
     form and delegate the creation of a GIMPLE_CALL to
2413
     gimplify_modify_expr.  This is always possible because when
2414
     WANT_VALUE is true, the caller wants the result of this call into
2415
     a temporary, which means that we will emit an INIT_EXPR in
2416
     internal_get_tmp_var which will then be handled by
2417
     gimplify_modify_expr.  */
2418
  if (!want_value)
2419
    {
2420
      /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
2421
         have to do is replicate it as a GIMPLE_CALL tuple.  */
2422
      call = gimple_build_call_from_tree (*expr_p);
2423
      gimplify_seq_add_stmt (pre_p, call);
2424
      *expr_p = NULL_TREE;
2425
    }
2426
 
2427
  return ret;
2428
}
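/* Illustrative sketch ('D.1' and 'D.2' are hypothetical temporaries):
   because of the sequence point before the call, argument side effects
   are flushed into PRE_P, so

       f (g (a), b++);

   gimplifies roughly to

       D.1 = g (a);
       D.2 = b;
       b = b + 1;
       f (D.1, D.2);  */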
2429
 
2430
/* Handle shortcut semantics in the predicate operand of a COND_EXPR by
2431
   rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
2432
 
2433
   TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
2434
   condition is true or false, respectively.  If null, we should generate
2435
   our own to skip over the evaluation of this specific expression.
2436
 
2437
   LOCUS is the source location of the COND_EXPR.
2438
 
2439
   This function is the tree equivalent of do_jump.
2440
 
2441
   shortcut_cond_r should only be called by shortcut_cond_expr.  */
2442
 
2443
static tree
2444
shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
2445
                 location_t locus)
2446
{
2447
  tree local_label = NULL_TREE;
2448
  tree t, expr = NULL;
2449
 
2450
  /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
2451
     retain the shortcut semantics.  Just insert the gotos here;
2452
     shortcut_cond_expr will append the real blocks later.  */
2453
  if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2454
    {
2455
      location_t new_locus;
2456
 
2457
      /* Turn if (a && b) into
2458
 
2459
         if (a); else goto no;
2460
         if (b) goto yes; else goto no;
2461
         (no:) */
2462
 
2463
      if (false_label_p == NULL)
2464
        false_label_p = &local_label;
2465
 
2466
      /* Keep the original source location on the first 'if'.  */
2467
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
2468
      append_to_statement_list (t, &expr);
2469
 
2470
      /* Set the source location of the && on the second 'if'.  */
2471
      new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2472
      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2473
                           new_locus);
2474
      append_to_statement_list (t, &expr);
2475
    }
2476
  else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2477
    {
2478
      location_t new_locus;
2479
 
2480
      /* Turn if (a || b) into
2481
 
2482
         if (a) goto yes;
2483
         if (b) goto yes; else goto no;
2484
         (yes:) */
2485
 
2486
      if (true_label_p == NULL)
2487
        true_label_p = &local_label;
2488
 
2489
      /* Keep the original source location on the first 'if'.  */
2490
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
2491
      append_to_statement_list (t, &expr);
2492
 
2493
      /* Set the source location of the || on the second 'if'.  */
2494
      new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2495
      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2496
                           new_locus);
2497
      append_to_statement_list (t, &expr);
2498
    }
2499
  else if (TREE_CODE (pred) == COND_EXPR)
2500
    {
2501
      location_t new_locus;
2502
 
2503
      /* As long as we're messing with gotos, turn if (a ? b : c) into
2504
         if (a)
2505
           if (b) goto yes; else goto no;
2506
         else
2507
           if (c) goto yes; else goto no;  */
2508
 
2509
      /* Keep the original source location on the first 'if'.  Set the source
2510
         location of the ? on the second 'if'.  */
2511
      new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2512
      expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
2513
                     shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
2514
                                      false_label_p, locus),
2515
                     shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
2516
                                      false_label_p, new_locus));
2517
    }
2518
  else
2519
    {
2520
      expr = build3 (COND_EXPR, void_type_node, pred,
2521
                     build_and_jump (true_label_p),
2522
                     build_and_jump (false_label_p));
2523
      SET_EXPR_LOCATION (expr, locus);
2524
    }
2525
 
2526
  if (local_label)
2527
    {
2528
      t = build1 (LABEL_EXPR, void_type_node, local_label);
2529
      append_to_statement_list (t, &expr);
2530
    }
2531
 
2532
  return expr;
2533
}
2534
 
2535
/* Given a conditional expression EXPR with short-circuit boolean
2536
   predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
2537
   predicate apart into the equivalent sequence of conditionals.  */
2538
 
2539
static tree
2540
shortcut_cond_expr (tree expr)
2541
{
2542
  tree pred = TREE_OPERAND (expr, 0);
2543
  tree then_ = TREE_OPERAND (expr, 1);
2544
  tree else_ = TREE_OPERAND (expr, 2);
2545
  tree true_label, false_label, end_label, t;
2546
  tree *true_label_p;
2547
  tree *false_label_p;
2548
  bool emit_end, emit_false, jump_over_else;
2549
  bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
2550
  bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
2551
 
2552
  /* First do simple transformations.  */
2553
  if (!else_se)
2554
    {
2555
      /* If there is no 'else', turn
2556
           if (a && b) then c
2557
         into
2558
           if (a) if (b) then c.  */
2559
      while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2560
        {
2561
          /* Keep the original source location on the first 'if'.  */
2562
          location_t locus = EXPR_HAS_LOCATION (expr)
2563
                             ? EXPR_LOCATION (expr) : input_location;
2564
          TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2565
          /* Set the source location of the && on the second 'if'.  */
2566
          if (EXPR_HAS_LOCATION (pred))
2567
            SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
2568
          then_ = shortcut_cond_expr (expr);
2569
          then_se = then_ && TREE_SIDE_EFFECTS (then_);
2570
          pred = TREE_OPERAND (pred, 0);
2571
          expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
2572
          SET_EXPR_LOCATION (expr, locus);
2573
        }
2574
    }
2575
 
2576
  if (!then_se)
2577
    {
2578
      /* If there is no 'then', turn
2579
           if (a || b); else d
2580
         into
2581
           if (a); else if (b); else d.  */
2582
      while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2583
        {
2584
          /* Keep the original source location on the first 'if'.  */
2585
          location_t locus = EXPR_HAS_LOCATION (expr)
2586
                             ? EXPR_LOCATION (expr) : input_location;
2587
          TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2588
          /* Set the source location of the || on the second 'if'.  */
2589
          if (EXPR_HAS_LOCATION (pred))
2590
            SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
2591
          else_ = shortcut_cond_expr (expr);
2592
          else_se = else_ && TREE_SIDE_EFFECTS (else_);
2593
          pred = TREE_OPERAND (pred, 0);
2594
          expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
2595
          SET_EXPR_LOCATION (expr, locus);
2596
        }
2597
    }
2598
 
2599
  /* If we're done, great.  */
2600
  if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
2601
      && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
2602
    return expr;
2603
 
2604
  /* Otherwise we need to mess with gotos.  Change
2605
       if (a) c; else d;
2606
     to
2607
       if (a); else goto no;
2608
       c; goto end;
2609
       no: d; end:
2610
     and recursively gimplify the condition.  */
2611
 
2612
  true_label = false_label = end_label = NULL_TREE;
2613
 
2614
  /* If our arms just jump somewhere, hijack those labels so we don't
2615
     generate jumps to jumps.  */
2616
 
2617
  if (then_
2618
      && TREE_CODE (then_) == GOTO_EXPR
2619
      && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
2620
    {
2621
      true_label = GOTO_DESTINATION (then_);
2622
      then_ = NULL;
2623
      then_se = false;
2624
    }
2625
 
2626
  if (else_
2627
      && TREE_CODE (else_) == GOTO_EXPR
2628
      && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
2629
    {
2630
      false_label = GOTO_DESTINATION (else_);
2631
      else_ = NULL;
2632
      else_se = false;
2633
    }
2634
 
2635
  /* If we aren't hijacking a label for the 'then' branch, it falls through.  */
2636
  if (true_label)
2637
    true_label_p = &true_label;
2638
  else
2639
    true_label_p = NULL;
2640
 
2641
  /* The 'else' branch also needs a label if it contains interesting code.  */
2642
  if (false_label || else_se)
2643
    false_label_p = &false_label;
2644
  else
2645
    false_label_p = NULL;
2646
 
2647
  /* If there was nothing else in our arms, just forward the label(s).  */
2648
  if (!then_se && !else_se)
2649
    return shortcut_cond_r (pred, true_label_p, false_label_p,
2650
                            EXPR_HAS_LOCATION (expr)
2651
                            ? EXPR_LOCATION (expr) : input_location);
2652
 
2653
  /* If our last subexpression already has a terminal label, reuse it.  */
2654
  if (else_se)
2655
    t = expr_last (else_);
2656
  else if (then_se)
2657
    t = expr_last (then_);
2658
  else
2659
    t = NULL;
2660
  if (t && TREE_CODE (t) == LABEL_EXPR)
2661
    end_label = LABEL_EXPR_LABEL (t);
2662
 
2663
  /* If we don't care about jumping to the 'else' branch, jump to the end
2664
     if the condition is false.  */
2665
  if (!false_label_p)
2666
    false_label_p = &end_label;
2667
 
2668
  /* We only want to emit these labels if we aren't hijacking them.  */
2669
  emit_end = (end_label == NULL_TREE);
2670
  emit_false = (false_label == NULL_TREE);
2671
 
2672
  /* We only emit the jump over the else clause if we have to--if the
2673
     then clause may fall through.  Otherwise we can wind up with a
2674
     useless jump and a useless label at the end of gimplified code,
2675
     which will cause us to think that this conditional as a whole
2676
     falls through even if it doesn't.  If we then inline a function
2677
     which ends with such a condition, that can cause us to issue an
2678
     inappropriate warning about control reaching the end of a
2679
     non-void function.  */
2680
  jump_over_else = block_may_fallthru (then_);
2681
 
2682
  pred = shortcut_cond_r (pred, true_label_p, false_label_p,
2683
                          EXPR_HAS_LOCATION (expr)
2684
                          ? EXPR_LOCATION (expr) : input_location);
2685
 
2686
  expr = NULL;
2687
  append_to_statement_list (pred, &expr);
2688
 
2689
  append_to_statement_list (then_, &expr);
2690
  if (else_se)
2691
    {
2692
      if (jump_over_else)
2693
        {
2694
          tree last = expr_last (expr);
2695
          t = build_and_jump (&end_label);
2696
          if (EXPR_HAS_LOCATION (last))
2697
            SET_EXPR_LOCATION (t, EXPR_LOCATION (last));
2698
          append_to_statement_list (t, &expr);
2699
        }
2700
      if (emit_false)
2701
        {
2702
          t = build1 (LABEL_EXPR, void_type_node, false_label);
2703
          append_to_statement_list (t, &expr);
2704
        }
2705
      append_to_statement_list (else_, &expr);
2706
    }
2707
  if (emit_end && end_label)
2708
    {
2709
      t = build1 (LABEL_EXPR, void_type_node, end_label);
2710
      append_to_statement_list (t, &expr);
2711
    }
2712
 
2713
  return expr;
2714
}
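/* Illustrative sketch (label names are hypothetical): applying the
   rewrite above to

       if (a && b) c (); else d ();

   yields roughly

       if (a) ; else goto no;
       if (b) ; else goto no;
       c ();
       goto end;
     no:
       d ();
     end:;  */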
2715
 
2716
/* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE.  */
2717
 
2718
tree
2719
gimple_boolify (tree expr)
2720
{
2721
  tree type = TREE_TYPE (expr);
2722
  location_t loc = EXPR_LOCATION (expr);
2723
 
2724
  if (TREE_CODE (expr) == NE_EXPR
2725
      && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
2726
      && integer_zerop (TREE_OPERAND (expr, 1)))
2727
    {
2728
      tree call = TREE_OPERAND (expr, 0);
2729
      tree fn = get_callee_fndecl (call);
2730
 
2731
      /* For __builtin_expect ((long) (x), y) recurse into x as well
2732
         if x is truth_value_p.  */
2733
      if (fn
2734
          && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL
2735
          && DECL_FUNCTION_CODE (fn) == BUILT_IN_EXPECT
2736
          && call_expr_nargs (call) == 2)
2737
        {
2738
          tree arg = CALL_EXPR_ARG (call, 0);
2739
          if (arg)
2740
            {
2741
              if (TREE_CODE (arg) == NOP_EXPR
2742
                  && TREE_TYPE (arg) == TREE_TYPE (call))
2743
                arg = TREE_OPERAND (arg, 0);
2744
              if (truth_value_p (TREE_CODE (arg)))
2745
                {
2746
                  arg = gimple_boolify (arg);
2747
                  CALL_EXPR_ARG (call, 0)
2748
                    = fold_convert_loc (loc, TREE_TYPE (call), arg);
2749
                }
2750
            }
2751
        }
2752
    }
2753
 
2754
  if (TREE_CODE (type) == BOOLEAN_TYPE)
2755
    return expr;
2756
 
2757
  switch (TREE_CODE (expr))
2758
    {
2759
    case TRUTH_AND_EXPR:
2760
    case TRUTH_OR_EXPR:
2761
    case TRUTH_XOR_EXPR:
2762
    case TRUTH_ANDIF_EXPR:
2763
    case TRUTH_ORIF_EXPR:
2764
      /* Also boolify the arguments of truth exprs.  */
2765
      TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
2766
      /* FALLTHRU */
2767
 
2768
    case TRUTH_NOT_EXPR:
2769
      TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2770
      /* FALLTHRU */
2771
 
2772
    case EQ_EXPR: case NE_EXPR:
2773
    case LE_EXPR: case GE_EXPR: case LT_EXPR: case GT_EXPR:
2774
      /* These expressions always produce boolean results.  */
2775
      TREE_TYPE (expr) = boolean_type_node;
2776
      return expr;
2777
 
2778
    default:
2779
      /* Other expressions that get here must have boolean values, but
2780
         might need to be converted to the appropriate mode.  */
2781
      return fold_convert_loc (loc, boolean_type_node, expr);
2782
    }
2783
}
2784
 
2785
/* Given a conditional expression *EXPR_P without side effects, gimplify
2786
   its operands.  New statements are inserted to PRE_P.  */
2787
 
2788
static enum gimplify_status
2789
gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
2790
{
2791
  tree expr = *expr_p, cond;
2792
  enum gimplify_status ret, tret;
2793
  enum tree_code code;
2794
 
2795
  cond = gimple_boolify (COND_EXPR_COND (expr));
2796
 
2797
  /* We need to handle && and || specially, as their gimplification
2798
     creates a pure COND_EXPR, which would otherwise lead to an infinite cycle.  */
2799
  code = TREE_CODE (cond);
2800
  if (code == TRUTH_ANDIF_EXPR)
2801
    TREE_SET_CODE (cond, TRUTH_AND_EXPR);
2802
  else if (code == TRUTH_ORIF_EXPR)
2803
    TREE_SET_CODE (cond, TRUTH_OR_EXPR);
2804
  ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
2805
  COND_EXPR_COND (*expr_p) = cond;
2806
 
2807
  tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
2808
                                   is_gimple_val, fb_rvalue);
2809
  ret = MIN (ret, tret);
2810
  tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
2811
                                   is_gimple_val, fb_rvalue);
2812
 
2813
  return MIN (ret, tret);
2814
}
2815
 
2816
/* Returns true if evaluating EXPR could trap.
2817
   EXPR is GENERIC, while tree_could_trap_p can be called
2818
   only on GIMPLE.  */
2819
 
2820
static bool
2821
generic_expr_could_trap_p (tree expr)
2822
{
2823
  unsigned i, n;
2824
 
2825
  if (!expr || is_gimple_val (expr))
2826
    return false;
2827
 
2828
  if (!EXPR_P (expr) || tree_could_trap_p (expr))
2829
    return true;
2830
 
2831
  n = TREE_OPERAND_LENGTH (expr);
2832
  for (i = 0; i < n; i++)
2833
    if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
2834
      return true;
2835
 
2836
  return false;
2837
}
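/* Illustrative sketch: gimplify_cond_expr below uses this predicate to
   decide whether both arms may be evaluated unconditionally.  With
   default flags, in

       r = p ? x : y;
       r = p ? *q : 0;

   neither arm of the first form can trap, so it remains a candidate
   for gimplify_pure_cond_expr, while '*q' in the second form may trap
   when 'p' is false, so that one takes the branching path instead.  */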
2838
 
2839
/*  Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
2840
    into
2841
 
2842
    if (p)                      if (p)
2843
      t1 = a;                     a;
2844
    else                or      else
2845
      t1 = b;                     b;
2846
    t1;
2847
 
2848
    The second form is used when *EXPR_P is of type void.
2849
 
2850
    PRE_P points to the list where side effects that must happen before
2851
      *EXPR_P should be stored.  */
2852
 
2853
static enum gimplify_status
2854
gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
2855
{
2856
  tree expr = *expr_p;
2857
  tree tmp, type, arm1, arm2;
2858
  enum gimplify_status ret;
2859
  tree label_true, label_false, label_cont;
2860
  bool have_then_clause_p, have_else_clause_p;
2861
  gimple gimple_cond;
2862
  enum tree_code pred_code;
2863
  gimple_seq seq = NULL;
2864
  location_t loc = EXPR_LOCATION (*expr_p);
2865
 
2866
  type = TREE_TYPE (expr);
2867
 
2868
  /* If this COND_EXPR has a value, copy the values into a temporary within
2869
     the arms.  */
2870
  if (! VOID_TYPE_P (type))
2871
    {
2872
      tree result;
2873
 
2874
      /* If an rvalue is ok or we do not require an lvalue, avoid creating
2875
         an addressable temporary.  */
2876
      if (((fallback & fb_rvalue)
2877
           || !(fallback & fb_lvalue))
2878
          && !TREE_ADDRESSABLE (type))
2879
        {
2880
          if (gimplify_ctxp->allow_rhs_cond_expr
2881
              /* If either branch has side effects or could trap, it can't be
2882
                 evaluated unconditionally.  */
2883
              && !TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1))
2884
              && !generic_expr_could_trap_p (TREE_OPERAND (*expr_p, 1))
2885
              && !TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 2))
2886
              && !generic_expr_could_trap_p (TREE_OPERAND (*expr_p, 2)))
2887
            return gimplify_pure_cond_expr (expr_p, pre_p);
2888
 
2889
          result = tmp = create_tmp_var (TREE_TYPE (expr), "iftmp");
2890
          ret = GS_ALL_DONE;
2891
        }
2892
      else
2893
        {
2894
          tree type = build_pointer_type (TREE_TYPE (expr));
2895
 
2896
          if (TREE_TYPE (TREE_OPERAND (expr, 1)) != void_type_node)
2897
            TREE_OPERAND (expr, 1) =
2898
              build_fold_addr_expr_loc (loc, TREE_OPERAND (expr, 1));
2899
 
2900
          if (TREE_TYPE (TREE_OPERAND (expr, 2)) != void_type_node)
2901
            TREE_OPERAND (expr, 2) =
2902
              build_fold_addr_expr_loc (loc, TREE_OPERAND (expr, 2));
2903
 
2904
          tmp = create_tmp_var (type, "iftmp");
2905
 
2906
          expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (expr, 0),
2907
                         TREE_OPERAND (expr, 1), TREE_OPERAND (expr, 2));
2908
 
2909
          result = build_fold_indirect_ref_loc (loc, tmp);
2910
        }
2911
 
2912
      /* Build the then clause, 't1 = a;'.  But don't build an assignment
2913
         if this branch is void; in C++ it can be, if it's a throw.  */
2914
      if (TREE_TYPE (TREE_OPERAND (expr, 1)) != void_type_node)
2915
        TREE_OPERAND (expr, 1)
2916
          = build2 (MODIFY_EXPR, TREE_TYPE (tmp), tmp, TREE_OPERAND (expr, 1));
2917
 
2918
      /* Build the else clause, 't1 = b;'.  */
2919
      if (TREE_TYPE (TREE_OPERAND (expr, 2)) != void_type_node)
2920
        TREE_OPERAND (expr, 2)
2921
          = build2 (MODIFY_EXPR, TREE_TYPE (tmp), tmp, TREE_OPERAND (expr, 2));
2922
 
2923
      TREE_TYPE (expr) = void_type_node;
2924
      recalculate_side_effects (expr);
2925
 
2926
      /* Move the COND_EXPR to the prequeue.  */
2927
      gimplify_stmt (&expr, pre_p);
2928
 
2929
      *expr_p = result;
2930
      return GS_ALL_DONE;
2931
    }
2932
 
2933
  /* Make sure the condition has BOOLEAN_TYPE.  */
2934
  TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2935
 
2936
  /* Break apart && and || conditions.  */
2937
  if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
2938
      || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
2939
    {
2940
      expr = shortcut_cond_expr (expr);
2941
 
2942
      if (expr != *expr_p)
2943
        {
2944
          *expr_p = expr;
2945
 
2946
          /* We can't rely on gimplify_expr to re-gimplify the expanded
2947
             form properly, as cleanups might cause the target labels to be
2948
             wrapped in a TRY_FINALLY_EXPR.  To prevent that, we need to
2949
             set up a conditional context.  */
2950
          gimple_push_condition ();
2951
          gimplify_stmt (expr_p, &seq);
2952
          gimple_pop_condition (pre_p);
2953
          gimple_seq_add_seq (pre_p, seq);
2954
 
2955
          return GS_ALL_DONE;
2956
        }
2957
    }
2958
 
2959
  /* Now do the normal gimplification.  */
2960
 
2961
  /* Gimplify condition.  */
2962
  ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
2963
                       fb_rvalue);
2964
  if (ret == GS_ERROR)
2965
    return GS_ERROR;
2966
  gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
2967
 
2968
  gimple_push_condition ();
2969
 
2970
  have_then_clause_p = have_else_clause_p = false;
2971
  if (TREE_OPERAND (expr, 1) != NULL
2972
      && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR
2973
      && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL
2974
      && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1)))
2975
          == current_function_decl)
2976
      /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
2977
         have different locations, otherwise we end up with incorrect
2978
         location information on the branches.  */
2979
      && (optimize
2980
          || !EXPR_HAS_LOCATION (expr)
2981
          || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1))
2982
          || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1))))
2983
    {
2984
      label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1));
2985
      have_then_clause_p = true;
2986
    }
2987
  else
2988
    label_true = create_artificial_label (UNKNOWN_LOCATION);
2989
  if (TREE_OPERAND (expr, 2) != NULL
2990
      && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR
2991
      && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL
2992
      && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2)))
2993
          == current_function_decl)
2994
      /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
2995
         have different locations, otherwise we end up with incorrect
2996
         location information on the branches.  */
2997
      && (optimize
2998
          || !EXPR_HAS_LOCATION (expr)
2999
          || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2))
3000
          || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2))))
3001
    {
3002
      label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2));
3003
      have_else_clause_p = true;
3004
    }
3005
  else
3006
    label_false = create_artificial_label (UNKNOWN_LOCATION);
3007
 
3008
  gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
3009
                                 &arm2);
3010
 
3011
  gimple_cond = gimple_build_cond (pred_code, arm1, arm2, label_true,
3012
                                   label_false);
3013
 
3014
  gimplify_seq_add_stmt (&seq, gimple_cond);
3015
  label_cont = NULL_TREE;
3016
  if (!have_then_clause_p)
3017
    {
3018
      /* For if (...) {} else { code; } put label_true after
3019
         the else block.  */
3020
      if (TREE_OPERAND (expr, 1) == NULL_TREE
3021
          && !have_else_clause_p
3022
          && TREE_OPERAND (expr, 2) != NULL_TREE)
3023
        label_cont = label_true;
3024
      else
3025
        {
3026
          gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
3027
          have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
3028
          /* For if (...) { code; } else {} or
3029
             if (...) { code; } else goto label; or
3030
             if (...) { code; return; } else { ... }
3031
             label_cont isn't needed.  */
3032
          if (!have_else_clause_p
3033
              && TREE_OPERAND (expr, 2) != NULL_TREE
3034
              && gimple_seq_may_fallthru (seq))
3035
            {
3036
              gimple g;
3037
              label_cont = create_artificial_label (UNKNOWN_LOCATION);
3038
 
3039
              g = gimple_build_goto (label_cont);
3040
 
3041
              /* GIMPLE_CONDs are very low level; they have embedded
3042
                 gotos.  This particular embedded goto should not be marked
3043
                 with the location of the original COND_EXPR, as it would
3044
                 correspond to the COND_EXPR's condition, not the ELSE or the
3045
                 THEN arms.  To avoid marking it with the wrong location, flag
3046
                 it as "no location".  */
3047
              gimple_set_do_not_emit_location (g);
3048
 
3049
              gimplify_seq_add_stmt (&seq, g);
3050
            }
3051
        }
3052
    }
3053
  if (!have_else_clause_p)
3054
    {
3055
      gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
3056
      have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
3057
    }
3058
  if (label_cont)
3059
    gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
3060
 
3061
  gimple_pop_condition (pre_p);
3062
  gimple_seq_add_seq (pre_p, seq);
3063
 
3064
  if (ret == GS_ERROR)
3065
    ; /* Do nothing.  */
3066
  else if (have_then_clause_p || have_else_clause_p)
3067
    ret = GS_ALL_DONE;
3068
  else
3069
    {
3070
      /* Both arms are empty; replace the COND_EXPR with its predicate.  */
3071
      expr = TREE_OPERAND (expr, 0);
3072
      gimplify_stmt (&expr, pre_p);
3073
    }
3074
 
3075
  *expr_p = NULL;
3076
  return ret;
3077
}
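 
/* Illustration (added for exposition, not part of the original source;
   the variable and label names below are invented):  for the GENERIC
   statement

     if (a > b) x = 1; else x = 2;

   the code above emits, roughly,

     if (a > b) goto <D.true>; else goto <D.false>;
     <D.true>:
     x = 1;
     goto <D.cont>;
     <D.false>:
     x = 2;
     <D.cont>:

   where the labels come from create_artificial_label, and <D.cont> and
   its goto are omitted when the then-arm cannot fall through.  */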
3078
 
3079
/* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
3080
   to be marked addressable.
3081
 
3082
   We cannot rely on such an expression being directly markable if a temporary
3083
   has been created by the gimplification.  In this case, we create another
3084
   temporary and initialize it with a copy, which will become a store after we
3085
   mark it addressable.  This can happen if the front-end passed us something
3086
   that it could not mark addressable yet, like a Fortran pass-by-reference
3087
   parameter (int) floatvar.  */
3088
 
3089
static void
3090
prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
3091
{
3092
  while (handled_component_p (*expr_p))
3093
    expr_p = &TREE_OPERAND (*expr_p, 0);
3094
  if (is_gimple_reg (*expr_p))
3095
    *expr_p = get_initialized_tmp_var (*expr_p, seq_p, NULL);
3096
}
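 
/* Illustration (added for exposition, not part of the original source;
   temporary names are invented):  if gimplification has already reduced
   the expression to a register temporary, say D.1, its address cannot be
   taken directly, so the code above first copies it into a second
   temporary that can later be marked addressable:

     D.2 = D.1;
     ... &D.2 may then be taken ...  */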
3097
 
3098
/* A subroutine of gimplify_modify_expr.  Replace a MODIFY_EXPR with
3099
   a call to __builtin_memcpy.  */
3100
 
3101
static enum gimplify_status
3102
gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
3103
                                gimple_seq *seq_p)
3104
{
3105
  tree t, to, to_ptr, from, from_ptr;
3106
  gimple gs;
3107
  location_t loc = EXPR_LOCATION (*expr_p);
3108
 
3109
  to = TREE_OPERAND (*expr_p, 0);
3110
  from = TREE_OPERAND (*expr_p, 1);
3111
 
3112
  /* Mark the RHS addressable.  Beware that it may not be possible to do so
3113
     directly if a temporary has been created by the gimplification.  */
3114
  prepare_gimple_addressable (&from, seq_p);
3115
 
3116
  mark_addressable (from);
3117
  from_ptr = build_fold_addr_expr_loc (loc, from);
3118
  gimplify_arg (&from_ptr, seq_p, loc);
3119
 
3120
  mark_addressable (to);
3121
  to_ptr = build_fold_addr_expr_loc (loc, to);
3122
  gimplify_arg (&to_ptr, seq_p, loc);
3123
 
3124
  t = implicit_built_in_decls[BUILT_IN_MEMCPY];
3125
 
3126
  gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
3127
 
3128
  if (want_value)
3129
    {
3130
      /* tmp = memcpy() */
3131
      t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
3132
      gimple_call_set_lhs (gs, t);
3133
      gimplify_seq_add_stmt (seq_p, gs);
3134
 
3135
      *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
3136
      return GS_ALL_DONE;
3137
    }
3138
 
3139
  gimplify_seq_add_stmt (seq_p, gs);
3140
  *expr_p = NULL;
3141
  return GS_ALL_DONE;
3142
}
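 
/* Illustration (added for exposition, not part of the original source;
   names are invented):  for two objects of a variable-sized type, say
   char[n],

     dest = src;

   is rewritten by the routine above into approximately

     __builtin_memcpy (&dest, &src, n);

   with the address operands gimplified into temporaries as needed.  */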
3143
 
3144
/* A subroutine of gimplify_modify_expr.  Replace a MODIFY_EXPR with
3145
   a call to __builtin_memset.  In this case we know that the RHS is
3146
   a CONSTRUCTOR with an empty element list.  */
3147
 
3148
static enum gimplify_status
3149
gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
3150
                                gimple_seq *seq_p)
3151
{
3152
  tree t, from, to, to_ptr;
3153
  gimple gs;
3154
  location_t loc = EXPR_LOCATION (*expr_p);
3155
 
3156
  /* Assert our assumptions, to abort instead of producing wrong code
3157
     silently if they are not met.  Beware that the RHS CONSTRUCTOR might
3158
     not be immediately exposed.  */
3159
  from = TREE_OPERAND (*expr_p, 1);
3160
  if (TREE_CODE (from) == WITH_SIZE_EXPR)
3161
    from = TREE_OPERAND (from, 0);
3162
 
3163
  gcc_assert (TREE_CODE (from) == CONSTRUCTOR
3164
              && VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (from)));
3165
 
3166
  /* Now proceed.  */
3167
  to = TREE_OPERAND (*expr_p, 0);
3168
 
3169
  to_ptr = build_fold_addr_expr_loc (loc, to);
3170
  gimplify_arg (&to_ptr, seq_p, loc);
3171
  t = implicit_built_in_decls[BUILT_IN_MEMSET];
3172
 
3173
  gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
3174
 
3175
  if (want_value)
3176
    {
3177
      /* tmp = memset() */
3178
      t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
3179
      gimple_call_set_lhs (gs, t);
3180
      gimplify_seq_add_stmt (seq_p, gs);
3181
 
3182
      *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
3183
      return GS_ALL_DONE;
3184
    }
3185
 
3186
  gimplify_seq_add_stmt (seq_p, gs);
3187
  *expr_p = NULL;
3188
  return GS_ALL_DONE;
3189
}
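 
/* Illustration (added for exposition, not part of the original source;
   names are invented):  assigning an empty CONSTRUCTOR to a
   variable-sized object, i.e. zero-initializing a char[n] object "dest",
   becomes approximately

     __builtin_memset (&dest, 0, n);  */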
3190
 
3191
/* A subroutine of gimplify_init_ctor_preeval.  Called via walk_tree,
3192
   determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
3193
   assignment.  Returns non-null if we detect a potential overlap.  */
3194
 
3195
struct gimplify_init_ctor_preeval_data
3196
{
3197
  /* The base decl of the lhs object.  May be NULL, in which case we
3198
     have to assume the lhs is indirect.  */
3199
  tree lhs_base_decl;
3200
 
3201
  /* The alias set of the lhs object.  */
3202
  alias_set_type lhs_alias_set;
3203
};
3204
 
3205
static tree
3206
gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
3207
{
3208
  struct gimplify_init_ctor_preeval_data *data
3209
    = (struct gimplify_init_ctor_preeval_data *) xdata;
3210
  tree t = *tp;
3211
 
3212
  /* If we find the base object, obviously we have overlap.  */
3213
  if (data->lhs_base_decl == t)
3214
    return t;
3215
 
3216
  /* If the constructor component is indirect, determine if we have a
3217
     potential overlap with the lhs.  The only bits of information we
3218
     have to go on at this point are addressability and alias sets.  */
3219
  if (TREE_CODE (t) == INDIRECT_REF
3220
      && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3221
      && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
3222
    return t;
3223
 
3224
  /* If the constructor component is a call, determine if it can hide a
3225
     potential overlap with the lhs through an INDIRECT_REF like above.  */
3226
  if (TREE_CODE (t) == CALL_EXPR)
3227
    {
3228
      tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
3229
 
3230
      for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
3231
        if (POINTER_TYPE_P (TREE_VALUE (type))
3232
            && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3233
            && alias_sets_conflict_p (data->lhs_alias_set,
3234
                                      get_alias_set
3235
                                        (TREE_TYPE (TREE_VALUE (type)))))
3236
          return t;
3237
    }
3238
 
3239
  if (IS_TYPE_OR_DECL_P (t))
3240
    *walk_subtrees = 0;
3241
  return NULL;
3242
}
3243
 
3244
/* A subroutine of gimplify_init_constructor.  Pre-evaluate EXPR,
3245
   force values that overlap with the lhs (as described by *DATA)
3246
   into temporaries.  */
3247
 
3248
static void
3249
gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3250
                            struct gimplify_init_ctor_preeval_data *data)
3251
{
3252
  enum gimplify_status one;
3253
 
3254
  /* If the value is constant, then there's nothing to pre-evaluate.  */
3255
  if (TREE_CONSTANT (*expr_p))
3256
    {
3257
      /* Ensure it does not have side effects, it might contain a reference to
3258
         the object we're initializing.  */
3259
      gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
3260
      return;
3261
    }
3262
 
3263
  /* If the type has non-trivial constructors, we can't pre-evaluate.  */
3264
  if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
3265
    return;
3266
 
3267
  /* Recurse for nested constructors.  */
3268
  if (TREE_CODE (*expr_p) == CONSTRUCTOR)
3269
    {
3270
      unsigned HOST_WIDE_INT ix;
3271
      constructor_elt *ce;
3272
      VEC(constructor_elt,gc) *v = CONSTRUCTOR_ELTS (*expr_p);
3273
 
3274
      for (ix = 0; VEC_iterate (constructor_elt, v, ix, ce); ix++)
3275
        gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
3276
 
3277
      return;
3278
    }
3279
 
3280
  /* If this is a variable sized type, we must remember the size.  */
3281
  maybe_with_size_expr (expr_p);
3282
 
3283
  /* Gimplify the constructor element to something appropriate for the rhs
3284
     of a MODIFY_EXPR.  Given that we know the LHS is an aggregate, we know
3285
     the gimplifier will consider this a store to memory.  Doing this
3286
     gimplification now means that we won't have to deal with complicated
3287
     language-specific trees, nor trees like SAVE_EXPR that can induce
3288
     exponential search behavior.  */
3289
  one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
3290
  if (one == GS_ERROR)
3291
    {
3292
      *expr_p = NULL;
3293
      return;
3294
    }
3295
 
3296
  /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
3297
     with the lhs, since "a = { .x=a }" doesn't make sense.  This will
3298
     always be true for all scalars, since is_gimple_mem_rhs insists on a
3299
     temporary variable for them.  */
3300
  if (DECL_P (*expr_p))
3301
    return;
3302
 
3303
  /* If this is of variable size, we have no choice but to assume it doesn't
3304
     overlap since we can't make a temporary for it.  */
3305
  if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
3306
    return;
3307
 
3308
  /* Otherwise, we must search for overlap ...  */
3309
  if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
3310
    return;
3311
 
3312
  /* ... and if found, force the value into a temporary.  */
3313
  *expr_p = get_formal_tmp_var (*expr_p, pre_p);
3314
}
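 
/* Illustration (added for exposition, not part of the original source;
   names are invented):  given

     a = (struct S) { .x = f (&a), .y = 0 };

   the call f (&a) may still read the old value of "a", so it is
   pre-evaluated into a temporary before "a" is cleared or overwritten:

     D.1 = f (&a);
     a = {};
     a.x = D.1;  */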
3315
 
3316
/* A subroutine of gimplify_init_ctor_eval.  Create a loop for
3317
   a RANGE_EXPR in a CONSTRUCTOR for an array.
3318
 
3319
      var = lower;
3320
    loop_entry:
3321
      object[var] = value;
3322
      if (var == upper)
3323
        goto loop_exit;
3324
      var = var + 1;
3325
      goto loop_entry;
3326
    loop_exit:
3327
 
3328
   We increment var _after_ the loop exit check because we might otherwise
3329
   fail if upper == TYPE_MAX_VALUE (type of upper).
3330
 
3331
   Note that we never have to deal with SAVE_EXPRs here, because this has
3332
   already been taken care of for us, in gimplify_init_ctor_preeval().  */
3333
 
3334
static void gimplify_init_ctor_eval (tree, VEC(constructor_elt,gc) *,
3335
                                     gimple_seq *, bool);
3336
 
3337
static void
3338
gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
3339
                               tree value, tree array_elt_type,
3340
                               gimple_seq *pre_p, bool cleared)
3341
{
3342
  tree loop_entry_label, loop_exit_label, fall_thru_label;
3343
  tree var, var_type, cref, tmp;
3344
 
3345
  loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
3346
  loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
3347
  fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
3348
 
3349
  /* Create and initialize the index variable.  */
3350
  var_type = TREE_TYPE (upper);
3351
  var = create_tmp_var (var_type, NULL);
3352
  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
3353
 
3354
  /* Add the loop entry label.  */
3355
  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
3356
 
3357
  /* Build the reference.  */
3358
  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3359
                 var, NULL_TREE, NULL_TREE);
3360
 
3361
  /* If we are a constructor, just call gimplify_init_ctor_eval to do
3362
     the store.  Otherwise just assign value to the reference.  */
3363
 
3364
  if (TREE_CODE (value) == CONSTRUCTOR)
3365
    /* NB we might have to call ourselves recursively through
3366
       gimplify_init_ctor_eval if the value is a constructor.  */
3367
    gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3368
                             pre_p, cleared);
3369
  else
3370
    gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
3371
 
3372
  /* We exit the loop when the index var is equal to the upper bound.  */
3373
  gimplify_seq_add_stmt (pre_p,
3374
                         gimple_build_cond (EQ_EXPR, var, upper,
3375
                                            loop_exit_label, fall_thru_label));
3376
 
3377
  gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
3378
 
3379
  /* Otherwise, increment the index var...  */
3380
  tmp = build2 (PLUS_EXPR, var_type, var,
3381
                fold_convert (var_type, integer_one_node));
3382
  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
3383
 
3384
  /* ...and jump back to the loop entry.  */
3385
  gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
3386
 
3387
  /* Add the loop exit label.  */
3388
  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
3389
}
3390
 
3391
/* Return true if FDECL is accessing a field that is zero sized.  */
3392
 
3393
static bool
3394
zero_sized_field_decl (const_tree fdecl)
3395
{
3396
  if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
3397
      && integer_zerop (DECL_SIZE (fdecl)))
3398
    return true;
3399
  return false;
3400
}
3401
 
3402
/* Return true if TYPE is zero sized.  */
3403
 
3404
static bool
3405
zero_sized_type (const_tree type)
3406
{
3407
  if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
3408
      && integer_zerop (TYPE_SIZE (type)))
3409
    return true;
3410
  return false;
3411
}
3412
 
3413
/* A subroutine of gimplify_init_constructor.  Generate individual
3414
   MODIFY_EXPRs for a CONSTRUCTOR.  OBJECT is the LHS against which the
3415
   assignments should happen.  ELTS is the CONSTRUCTOR_ELTS of the
3416
   CONSTRUCTOR.  CLEARED is true if the entire LHS object has been
3417
   zeroed first.  */
3418
 
3419
static void
3420
gimplify_init_ctor_eval (tree object, VEC(constructor_elt,gc) *elts,
3421
                         gimple_seq *pre_p, bool cleared)
3422
{
3423
  tree array_elt_type = NULL;
3424
  unsigned HOST_WIDE_INT ix;
3425
  tree purpose, value;
3426
 
3427
  if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
3428
    array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
3429
 
3430
  FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
3431
    {
3432
      tree cref;
3433
 
3434
      /* NULL values are created above for gimplification errors.  */
3435
      if (value == NULL)
3436
        continue;
3437
 
3438
      if (cleared && initializer_zerop (value))
3439
        continue;
3440
 
3441
      /* ??? Here's to hoping the front end fills in all of the indices,
3442
         so we don't have to figure out what's missing ourselves.  */
3443
      gcc_assert (purpose);
3444
 
3445
      /* Skip zero-sized fields, unless value has side-effects.  This can
3446
         happen with calls to functions returning a zero-sized type, which
3447
         we shouldn't discard.  As a number of downstream passes don't
3448
         expect sets of zero-sized fields, we rely on the gimplification of
3449
         the MODIFY_EXPR we make below to drop the assignment statement.  */
3450
      if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
3451
        continue;
3452
 
3453
      /* If we have a RANGE_EXPR, we have to build a loop to assign the
3454
         whole range.  */
3455
      if (TREE_CODE (purpose) == RANGE_EXPR)
3456
        {
3457
          tree lower = TREE_OPERAND (purpose, 0);
3458
          tree upper = TREE_OPERAND (purpose, 1);
3459
 
3460
          /* If the lower bound is equal to upper, just treat it as if
3461
             upper was the index.  */
3462
          if (simple_cst_equal (lower, upper))
3463
            purpose = upper;
3464
          else
3465
            {
3466
              gimplify_init_ctor_eval_range (object, lower, upper, value,
3467
                                             array_elt_type, pre_p, cleared);
3468
              continue;
3469
            }
3470
        }
3471
 
3472
      if (array_elt_type)
3473
        {
3474
          /* Do not use bitsizetype for ARRAY_REF indices.  */
3475
          if (TYPE_DOMAIN (TREE_TYPE (object)))
3476
            purpose = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
3477
                                    purpose);
3478
          cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3479
                         purpose, NULL_TREE, NULL_TREE);
3480
        }
3481
      else
3482
        {
3483
          gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
3484
          cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
3485
                         unshare_expr (object), purpose, NULL_TREE);
3486
        }
3487
 
3488
      if (TREE_CODE (value) == CONSTRUCTOR
3489
          && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
3490
        gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3491
                                 pre_p, cleared);
3492
      else
3493
        {
3494
          tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
3495
          gimplify_and_add (init, pre_p);
3496
          ggc_free (init);
3497
        }
3498
    }
3499
}
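 
/* Illustration (added for exposition, not part of the original source;
   names are invented):  with CLEARED false, an initializer such as

     struct S s = { .a = 1, .b = g () };

   is broken up by the routine above into the element-wise assignments

     s.a = 1;
     s.b = g ();

   while RANGE_EXPR indices are expanded into the loop shown before
   gimplify_init_ctor_eval_range.  */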
3500
 
3501
 
3502
/* Returns the appropriate RHS predicate for this LHS.  */
3503
 
3504
gimple_predicate
3505
rhs_predicate_for (tree lhs)
3506
{
3507
  if (is_gimple_reg (lhs))
3508
    return is_gimple_reg_rhs_or_call;
3509
  else
3510
    return is_gimple_mem_rhs_or_call;
3511
}
3512
 
3513
/* Gimplify a C99 compound literal expression.  This just means adding
3514
   the DECL_EXPR before the current statement and using its anonymous
3515
   decl instead.  */
3516
 
3517
static enum gimplify_status
3518
gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p)
3519
{
3520
  tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
3521
  tree decl = DECL_EXPR_DECL (decl_s);
3522
  /* Mark the decl as addressable if the compound literal
3523
     expression is addressable now, otherwise it is marked too late
3524
     after we gimplify the initialization expression.  */
3525
  if (TREE_ADDRESSABLE (*expr_p))
3526
    TREE_ADDRESSABLE (decl) = 1;
3527
 
3528
  /* Preliminarily mark non-addressed complex variables as eligible
3529
     for promotion to gimple registers.  We'll transform their uses
3530
     as we find them.  */
3531
  if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
3532
       || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
3533
      && !TREE_THIS_VOLATILE (decl)
3534
      && !needs_to_live_in_memory (decl))
3535
    DECL_GIMPLE_REG_P (decl) = 1;
3536
 
3537
  /* This decl isn't mentioned in the enclosing block, so add it to the
3538
     list of temps.  FIXME it seems a bit of a kludge to say that
3539
     anonymous artificial vars aren't pushed, but everything else is.  */
3540
  if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
3541
    gimple_add_tmp_var (decl);
3542
 
3543
  gimplify_and_add (decl_s, pre_p);
3544
  *expr_p = decl;
3545
  return GS_OK;
3546
}
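 
/* Illustration (added for exposition, not part of the original source;
   names are invented):  for a C99 compound literal used as an rvalue,

     p = &(int) { 42 };

   the DECL_EXPR for the literal's anonymous variable, call it D.1, is
   emitted ahead of the statement and the literal is replaced by D.1:

     D.1 = 42;
     p = &D.1;  */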
3547
 
3548
/* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
3549
   returning a new CONSTRUCTOR if something changed.  */
3550
 
3551
static tree
3552
optimize_compound_literals_in_ctor (tree orig_ctor)
3553
{
3554
  tree ctor = orig_ctor;
3555
  VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (ctor);
3556
  unsigned int idx, num = VEC_length (constructor_elt, elts);
3557
 
3558
  for (idx = 0; idx < num; idx++)
3559
    {
3560
      tree value = VEC_index (constructor_elt, elts, idx)->value;
3561
      tree newval = value;
3562
      if (TREE_CODE (value) == CONSTRUCTOR)
3563
        newval = optimize_compound_literals_in_ctor (value);
3564
      else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
3565
        {
3566
          tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
3567
          tree decl = DECL_EXPR_DECL (decl_s);
3568
          tree init = DECL_INITIAL (decl);
3569
 
3570
          if (!TREE_ADDRESSABLE (value)
3571
              && !TREE_ADDRESSABLE (decl)
3572
              && init)
3573
            newval = optimize_compound_literals_in_ctor (init);
3574
        }
3575
      if (newval == value)
3576
        continue;
3577
 
3578
      if (ctor == orig_ctor)
3579
        {
3580
          ctor = copy_node (orig_ctor);
3581
          CONSTRUCTOR_ELTS (ctor) = VEC_copy (constructor_elt, gc, elts);
3582
          elts = CONSTRUCTOR_ELTS (ctor);
3583
        }
3584
      VEC_index (constructor_elt, elts, idx)->value = newval;
3585
    }
3586
  return ctor;
3587
}
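 
/* Illustration (added for exposition, not part of the original source;
   names are invented):  a nested, non-addressable compound literal inside
   a CONSTRUCTOR, e.g.

     struct T t = { .inner = (struct U) { 1, 2 } };

   is flattened by the routine above into

     struct T t = { .inner = { 1, 2 } };

   by substituting the literal's DECL_INITIAL for the literal itself.  */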
3588
 
3589
 
3590
 
3591
/* A subroutine of gimplify_modify_expr.  Break out elements of a
3592
   CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
3593
 
3594
   Note that we still need to clear any elements that don't have explicit
3595
   initializers, so if not all elements are initialized we keep the
3596
   original MODIFY_EXPR, we just remove all of the constructor elements.
3597
 
3598
   If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
3599
   GS_ERROR if we would have to create a temporary when gimplifying
3600
   this constructor.  Otherwise, return GS_OK.
3601
 
3602
   If NOTIFY_TEMP_CREATION is false, just do the gimplification.  */
3603
 
3604
static enum gimplify_status
3605
gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3606
                           bool want_value, bool notify_temp_creation)
3607
{
3608
  tree object, ctor, type;
3609
  enum gimplify_status ret;
3610
  VEC(constructor_elt,gc) *elts;
3611
 
3612
  gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
3613
 
3614
  if (!notify_temp_creation)
3615
    {
3616
      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
3617
                           is_gimple_lvalue, fb_lvalue);
3618
      if (ret == GS_ERROR)
3619
        return ret;
3620
    }
3621
 
3622
  object = TREE_OPERAND (*expr_p, 0);
3623
  ctor = TREE_OPERAND (*expr_p, 1) =
3624
    optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
3625
  type = TREE_TYPE (ctor);
3626
  elts = CONSTRUCTOR_ELTS (ctor);
3627
  ret = GS_ALL_DONE;
3628
 
3629
  switch (TREE_CODE (type))
3630
    {
3631
    case RECORD_TYPE:
3632
    case UNION_TYPE:
3633
    case QUAL_UNION_TYPE:
3634
    case ARRAY_TYPE:
3635
      {
3636
        struct gimplify_init_ctor_preeval_data preeval_data;
3637
        HOST_WIDE_INT num_type_elements, num_ctor_elements;
3638
        HOST_WIDE_INT num_nonzero_elements;
3639
        bool cleared, valid_const_initializer;
3640
 
3641
        /* Aggregate types must lower constructors to initialization of
3642
           individual elements.  The exception is that a CONSTRUCTOR node
3643
           with no elements indicates zero-initialization of the whole.  */
3644
        if (VEC_empty (constructor_elt, elts))
3645
          {
3646
            if (notify_temp_creation)
3647
              return GS_OK;
3648
            break;
3649
          }
3650
 
3651
        /* Fetch information about the constructor to direct later processing.
3652
           We might want to make static versions of it in various cases, and
3653
           can only do so if it is known to be a valid constant initializer.  */
3654
        valid_const_initializer
3655
          = categorize_ctor_elements (ctor, &num_nonzero_elements,
3656
                                      &num_ctor_elements, &cleared);
3657
 
3658
        /* If a const aggregate variable is being initialized, then it
3659
           should never be a loss to promote the variable to be static.  */
3660
        if (valid_const_initializer
3661
            && num_nonzero_elements > 1
3662
            && TREE_READONLY (object)
3663
            && TREE_CODE (object) == VAR_DECL
3664
            && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
3665
          {
3666
            if (notify_temp_creation)
3667
              return GS_ERROR;
3668
            DECL_INITIAL (object) = ctor;
3669
            TREE_STATIC (object) = 1;
3670
            if (!DECL_NAME (object))
3671
              DECL_NAME (object) = create_tmp_var_name ("C");
3672
            walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
3673
 
3674
            /* ??? C++ doesn't automatically append a .<number> to the
3675
               assembler name, and even when it does, it looks at FE private
3676
               data structures to figure out what that number should be,
3677
               which are not set for this variable.  I suppose this is
3678
               important for local statics for inline functions, which aren't
3679
               "local" in the object file sense.  So in order to get a unique
3680
               TU-local symbol, we must invoke the lhd version now.  */
3681
            lhd_set_decl_assembler_name (object);
3682
 
3683
            *expr_p = NULL_TREE;
3684
            break;
3685
          }
3686
 
3687
        /* If there are "lots" of initialized elements, even discounting
3688
           those that are not address constants (and thus *must* be
3689
           computed at runtime), then partition the constructor into
3690
           constant and non-constant parts.  Block copy the constant
3691
           parts in, then generate code for the non-constant parts.  */
3692
        /* TODO.  There's code in cp/typeck.c to do this.  */
3693
 
3694
        num_type_elements = count_type_elements (type, true);
3695
 
3696
        /* If count_type_elements could not determine number of type elements
3697
           for a constant-sized object, assume clearing is needed.
3698
           Don't do this for variable-sized objects, as store_constructor
3699
           will ignore the clearing of variable-sized objects.  */
3700
        if (num_type_elements < 0 && int_size_in_bytes (type) >= 0)
3701
          cleared = true;
3702
        /* If there are "lots" of zeros, then block clear the object first.  */
3703
        else if (num_type_elements - num_nonzero_elements
3704
                 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
3705
                 && num_nonzero_elements < num_type_elements/4)
3706
          cleared = true;
3707
        /* ??? This bit ought not be needed.  For any element not present
3708
           in the initializer, we should simply set it to zero.  Except
3709
           we'd need to *find* the elements that are not present, and that
3710
           requires trickery to avoid quadratic compile-time behavior in
3711
           large cases or excessive memory use in small cases.  */
3712
        else if (num_ctor_elements < num_type_elements)
3713
          cleared = true;
3714
 
3715
        /* If there are "lots" of initialized elements, and all of them
3716
           are valid address constants, then the entire initializer can
3717
           be dropped to memory, and then memcpy'd out.  Don't do this
3718
           for sparse arrays, though, as it's more efficient to follow
3719
           the standard CONSTRUCTOR behavior of memset followed by
3720
           individual element initialization.  Also don't do this for small
3721
           all-zero initializers (which aren't big enough to merit
3722
           clearing), and don't try to make bitwise copies of
3723
           TREE_ADDRESSABLE types.  */
3724
        if (valid_const_initializer
3725
            && !(cleared || num_nonzero_elements == 0)
3726
            && !TREE_ADDRESSABLE (type))
3727
          {
3728
            HOST_WIDE_INT size = int_size_in_bytes (type);
3729
            unsigned int align;
3730
 
3731
            /* ??? We can still get unbounded array types, at least
3732
               from the C++ front end.  This seems wrong, but attempt
3733
               to work around it for now.  */
3734
            if (size < 0)
3735
              {
3736
                size = int_size_in_bytes (TREE_TYPE (object));
3737
                if (size >= 0)
3738
                  TREE_TYPE (ctor) = type = TREE_TYPE (object);
3739
              }
3740
 
3741
            /* Find the maximum alignment we can assume for the object.  */
3742
            /* ??? Make use of DECL_OFFSET_ALIGN.  */
3743
            if (DECL_P (object))
3744
              align = DECL_ALIGN (object);
3745
            else
3746
              align = TYPE_ALIGN (type);
3747
 
3748
            if (size > 0
3749
                && num_nonzero_elements > 1
3750
                && !can_move_by_pieces (size, align))
3751
              {
3752
                tree new_tree;
3753
 
3754
                if (notify_temp_creation)
3755
                  return GS_ERROR;
3756
 
3757
                new_tree = create_tmp_var_raw (type, "C");
3758
 
3759
                gimple_add_tmp_var (new_tree);
3760
                TREE_STATIC (new_tree) = 1;
3761
                TREE_READONLY (new_tree) = 1;
3762
                DECL_INITIAL (new_tree) = ctor;
3763
                if (align > DECL_ALIGN (new_tree))
3764
                  {
3765
                    DECL_ALIGN (new_tree) = align;
3766
                    DECL_USER_ALIGN (new_tree) = 1;
3767
                  }
3768
                walk_tree (&DECL_INITIAL (new_tree), force_labels_r, NULL, NULL);
3769
 
3770
                TREE_OPERAND (*expr_p, 1) = new_tree;
3771
 
3772
                /* This is no longer an assignment of a CONSTRUCTOR, but
3773
                   we still may have processing to do on the LHS.  So
3774
                   pretend we didn't do anything here to let that happen.  */
3775
                return GS_UNHANDLED;
3776
              }
3777
          }
3778
 
3779
        /* If the target is volatile and we have non-zero elements
3780
           initialize the target from a temporary.  */
3781
        if (TREE_THIS_VOLATILE (object)
3782
            && !TREE_ADDRESSABLE (type)
3783
            && num_nonzero_elements > 0)
3784
          {
3785
            tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type), NULL);
3786
            TREE_OPERAND (*expr_p, 0) = temp;
3787
            *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
3788
                              *expr_p,
3789
                              build2 (MODIFY_EXPR, void_type_node,
3790
                                      object, temp));
3791
            return GS_OK;
3792
          }
3793
 
3794
        if (notify_temp_creation)
3795
          return GS_OK;
3796
 
3797
        /* If there are nonzero elements, pre-evaluate to capture elements
3798
           overlapping with the lhs into temporaries.  We must do this before
3799
           clearing to fetch the values before they are zeroed-out.  */
3800
        if (num_nonzero_elements > 0)
3801
          {
3802
            preeval_data.lhs_base_decl = get_base_address (object);
3803
            if (!DECL_P (preeval_data.lhs_base_decl))
3804
              preeval_data.lhs_base_decl = NULL;
3805
            preeval_data.lhs_alias_set = get_alias_set (object);
3806
 
3807
            gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
3808
                                        pre_p, post_p, &preeval_data);
3809
          }
3810
 
3811
        if (cleared)
3812
          {
3813
            /* Zap the CONSTRUCTOR element list, which simplifies this case.
3814
               Note that we still have to gimplify, in order to handle the
3815
               case of variable sized types.  Avoid shared tree structures.  */
3816
            CONSTRUCTOR_ELTS (ctor) = NULL;
3817
            TREE_SIDE_EFFECTS (ctor) = 0;
3818
            object = unshare_expr (object);
3819
            gimplify_stmt (expr_p, pre_p);
3820
          }
3821
 
3822
        /* If we have not block cleared the object, or if there are nonzero
3823
           elements in the constructor, add assignments to the individual
3824
           scalar fields of the object.  */
3825
        if (!cleared || num_nonzero_elements > 0)
3826
          gimplify_init_ctor_eval (object, elts, pre_p, cleared);
3827
 
3828
        *expr_p = NULL_TREE;
3829
      }
3830
      break;
3831
 
3832
    case COMPLEX_TYPE:
3833
      {
3834
        tree r, i;
3835
 
3836
        if (notify_temp_creation)
3837
          return GS_OK;
3838
 
3839
        /* Extract the real and imaginary parts out of the ctor.  */
3840
        gcc_assert (VEC_length (constructor_elt, elts) == 2);
3841
        r = VEC_index (constructor_elt, elts, 0)->value;
3842
        i = VEC_index (constructor_elt, elts, 1)->value;
3843
        if (r == NULL || i == NULL)
3844
          {
3845
            tree zero = fold_convert (TREE_TYPE (type), integer_zero_node);
3846
            if (r == NULL)
3847
              r = zero;
3848
            if (i == NULL)
3849
              i = zero;
3850
          }
3851
 
3852
        /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
3853
           represent creation of a complex value.  */
3854
        if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
3855
          {
3856
            ctor = build_complex (type, r, i);
3857
            TREE_OPERAND (*expr_p, 1) = ctor;
3858
          }
3859
        else
3860
          {
3861
            ctor = build2 (COMPLEX_EXPR, type, r, i);
3862
            TREE_OPERAND (*expr_p, 1) = ctor;
3863
            ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
3864
                                 pre_p,
3865
                                 post_p,
3866
                                 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
3867
                                 fb_rvalue);
3868
          }
3869
      }
3870
      break;
3871
 
3872
    case VECTOR_TYPE:
3873
      {
3874
        unsigned HOST_WIDE_INT ix;
3875
        constructor_elt *ce;
3876
 
3877
        if (notify_temp_creation)
3878
          return GS_OK;
3879
 
3880
        /* Go ahead and simplify constant constructors to VECTOR_CST.  */
3881
        if (TREE_CONSTANT (ctor))
3882
          {
3883
            bool constant_p = true;
3884
            tree value;
3885
 
3886
            /* Even when ctor is constant, it might contain non-*_CST
3887
               elements, such as addresses or trapping values like
3888
               1.0/0.0 - 1.0/0.0.  Such expressions don't belong
3889
               in VECTOR_CST nodes.  */
3890
            FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
3891
              if (!CONSTANT_CLASS_P (value))
3892
                {
3893
                  constant_p = false;
3894
                  break;
3895
                }
3896
 
3897
            if (constant_p)
3898
              {
3899
                TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
3900
                break;
3901
              }
3902
 
3903
            /* Don't reduce an initializer constant even if we can't
3904
               make a VECTOR_CST.  It won't do anything for us, and it'll
3905
               prevent us from representing it as a single constant.  */
3906
            if (initializer_constant_valid_p (ctor, type))
3907
              break;
3908
 
3909
            TREE_CONSTANT (ctor) = 0;
3910
          }
3911
 
3912
        /* Vector types use CONSTRUCTOR all the way through gimple
3913
          compilation as a general initializer.  */
3914
        for (ix = 0; VEC_iterate (constructor_elt, elts, ix, ce); ix++)
3915
          {
3916
            enum gimplify_status tret;
3917
            tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
3918
                                  fb_rvalue);
3919
            if (tret == GS_ERROR)
3920
              ret = GS_ERROR;
3921
          }
3922
        if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
3923
          TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
3924
      }
3925
      break;
3926
 
3927
    default:
3928
      /* So how did we get a CONSTRUCTOR for a scalar type?  */
3929
      gcc_unreachable ();
3930
    }
3931
 
3932
  if (ret == GS_ERROR)
3933
    return GS_ERROR;
3934
  else if (want_value)
3935
    {
3936
      *expr_p = object;
3937
      return GS_OK;
3938
    }
3939
  else
3940
    {
3941
      /* If we have gimplified both sides of the initializer but have
3942
         not emitted an assignment, do so now.  */
3943
      if (*expr_p)
3944
        {
3945
          tree lhs = TREE_OPERAND (*expr_p, 0);
3946
          tree rhs = TREE_OPERAND (*expr_p, 1);
3947
          gimple init = gimple_build_assign (lhs, rhs);
3948
          gimplify_seq_add_stmt (pre_p, init);
3949
          *expr_p = NULL;
3950
        }
3951
 
3952
      return GS_ALL_DONE;
3953
    }
3954
}
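 
/* Illustration (added for exposition, not part of the original source;
   names are invented):  a mostly-zero aggregate initializer such as

     struct S s = { .a = 1 };      (many other fields, all zero)

   is typically handled by first block-clearing the object and then
   assigning only the nonzero elements:

     s = {};
     s.a = 1;

   whereas a large constant initializer of a read-only local may instead
   be promoted to a TREE_STATIC variable and block-copied.  */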
3955
 
3956
/* Given a pointer value OP0, return a simplified version of an
3957
   indirection through OP0, or NULL_TREE if no simplification is
3958
   possible.  Note that the resulting type may be different from
3959
   the type pointed to in the sense that it is still compatible
3960
   from the langhooks point of view. */
3961
 
3962
tree
3963
gimple_fold_indirect_ref (tree t)
3964
{
3965
  tree type = TREE_TYPE (TREE_TYPE (t));
3966
  tree sub = t;
3967
  tree subtype;
3968
 
3969
  STRIP_NOPS (sub);
3970
  subtype = TREE_TYPE (sub);
3971
  if (!POINTER_TYPE_P (subtype))
3972
    return NULL_TREE;
3973
 
3974
  if (TREE_CODE (sub) == ADDR_EXPR)
3975
    {
3976
      tree op = TREE_OPERAND (sub, 0);
3977
      tree optype = TREE_TYPE (op);
3978
      /* *&p => p */
3979
      if (useless_type_conversion_p (type, optype))
3980
        return op;
3981
 
3982
      /* *(foo *)&fooarray => fooarray[0] */
3983
      if (TREE_CODE (optype) == ARRAY_TYPE
3984
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
3985
          && useless_type_conversion_p (type, TREE_TYPE (optype)))
3986
       {
3987
         tree type_domain = TYPE_DOMAIN (optype);
3988
         tree min_val = size_zero_node;
3989
         if (type_domain && TYPE_MIN_VALUE (type_domain))
3990
           min_val = TYPE_MIN_VALUE (type_domain);
3991
         if (TREE_CODE (min_val) == INTEGER_CST)
3992
           return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
3993
       }
3994
      /* *(foo *)&complexfoo => __real__ complexfoo */
3995
      else if (TREE_CODE (optype) == COMPLEX_TYPE
3996
               && useless_type_conversion_p (type, TREE_TYPE (optype)))
3997
        return fold_build1 (REALPART_EXPR, type, op);
3998
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
3999
      else if (TREE_CODE (optype) == VECTOR_TYPE
4000
               && useless_type_conversion_p (type, TREE_TYPE (optype)))
4001
        {
4002
          tree part_width = TYPE_SIZE (type);
4003
          tree index = bitsize_int (0);
4004
          return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
4005
        }
4006
    }
4007
 
4008
  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
4009
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
4010
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
4011
    {
4012
      tree op00 = TREE_OPERAND (sub, 0);
4013
      tree op01 = TREE_OPERAND (sub, 1);
4014
      tree op00type;
4015
 
4016
      STRIP_NOPS (op00);
4017
      op00type = TREE_TYPE (op00);
4018
      if (TREE_CODE (op00) == ADDR_EXPR
4019
          && TREE_CODE (TREE_TYPE (op00type)) == VECTOR_TYPE
4020
          && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (op00type))))
4021
        {
4022
          HOST_WIDE_INT offset = tree_low_cst (op01, 0);
4023
          tree part_width = TYPE_SIZE (type);
4024
          unsigned HOST_WIDE_INT part_widthi
4025
            = tree_low_cst (part_width, 0) / BITS_PER_UNIT;
4026
          unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
4027
          tree index = bitsize_int (indexi);
4028
          if (offset / part_widthi
4029
              <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (op00type)))
4030
            return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (op00, 0),
4031
                                part_width, index);
4032
        }
4033
    }
4034
 
4035
  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
4036
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
4037
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
4038
    {
4039
      tree op00 = TREE_OPERAND (sub, 0);
4040
      tree op01 = TREE_OPERAND (sub, 1);
4041
      tree op00type;
4042
 
4043
      STRIP_NOPS (op00);
4044
      op00type = TREE_TYPE (op00);
4045
      if (TREE_CODE (op00) == ADDR_EXPR
4046
          && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
4047
          && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (op00type))))
4048
        {
4049
          tree size = TYPE_SIZE_UNIT (type);
4050
          if (tree_int_cst_equal (size, op01))
4051
            return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
4052
        }
4053
    }
4054
 
4055
  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
4056
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
4057
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
4058
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
4059
    {
4060
      tree type_domain;
4061
      tree min_val = size_zero_node;
4062
      tree osub = sub;
4063
      sub = gimple_fold_indirect_ref (sub);
4064
      if (! sub)
4065
        sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
4066
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
4067
      if (type_domain && TYPE_MIN_VALUE (type_domain))
4068
        min_val = TYPE_MIN_VALUE (type_domain);
4069
      if (TREE_CODE (min_val) == INTEGER_CST)
4070
        return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
4071
    }
4072
 
4073
  return NULL_TREE;
4074
}
4075
 
4076
/* Given a pointer value OP0, return a simplified version of an
4077
   indirection through OP0, or NULL_TREE if no simplification is
4078
   possible.  This may only be applied to a rhs of an expression.
4079
   Note that the resulting type may be different from the type pointed
4080
   to in the sense that it is still compatible from the langhooks
4081
   point of view. */
4082
 
4083
static tree
4084
gimple_fold_indirect_ref_rhs (tree t)
4085
{
4086
  return gimple_fold_indirect_ref (t);
4087
}
4088
 
4089
/* Subroutine of gimplify_modify_expr to do simplifications of
4090
   MODIFY_EXPRs based on the code of the RHS.  We loop for as long as
4091
   something changes.  */
4092
 
4093
static enum gimplify_status
4094
gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
4095
                          gimple_seq *pre_p, gimple_seq *post_p,
4096
                          bool want_value)
4097
{
4098
  enum gimplify_status ret = GS_UNHANDLED;
4099
  bool changed;
4100
 
4101
  do
4102
    {
4103
      changed = false;
4104
      switch (TREE_CODE (*from_p))
4105
        {
4106
        case VAR_DECL:
4107
          /* If we're assigning from a read-only variable initialized with
4108
             a constructor, do the direct assignment from the constructor,
4109
             but only if neither source nor target are volatile since this
4110
             latter assignment might end up being done on a per-field basis.  */
4111
          if (DECL_INITIAL (*from_p)
4112
              && TREE_READONLY (*from_p)
4113
              && !TREE_THIS_VOLATILE (*from_p)
4114
              && !TREE_THIS_VOLATILE (*to_p)
4115
              && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
4116
            {
4117
              tree old_from = *from_p;
4118
              enum gimplify_status subret;
4119
 
4120
              /* Move the constructor into the RHS.  */
4121
              *from_p = unshare_expr (DECL_INITIAL (*from_p));
4122
 
4123
              /* Let's see if gimplify_init_constructor will need to put
4124
                 it in memory.  */
4125
              subret = gimplify_init_constructor (expr_p, NULL, NULL,
4126
                                                  false, true);
4127
              if (subret == GS_ERROR)
4128
                {
4129
                  /* If so, revert the change.  */
4130
                  *from_p = old_from;
4131
                }
4132
              else
4133
                {
4134
                  ret = GS_OK;
4135
                  changed = true;
4136
                }
4137
            }
4138
          break;
4139
        case INDIRECT_REF:
4140
          {
4141
            /* If we have code like
4142
 
4143
             *(const A*)(A*)&x
4144
 
4145
             where the type of "x" is a (possibly cv-qualified variant
4146
             of "A"), treat the entire expression as identical to "x".
4147
             This kind of code arises in C++ when an object is bound
4148
             to a const reference, and if "x" is a TARGET_EXPR we want
4149
             to take advantage of the optimization below.  */
4150
            tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
4151
            if (t)
4152
              {
4153
                *from_p = t;
4154
                ret = GS_OK;
4155
                changed = true;
4156
              }
4157
            break;
4158
          }
4159
 
4160
        case TARGET_EXPR:
4161
          {
4162
            /* If we are initializing something from a TARGET_EXPR, strip the
4163
               TARGET_EXPR and initialize it directly, if possible.  This can't
4164
               be done if the initializer is void, since that implies that the
4165
               temporary is set in some non-trivial way.
4166
 
4167
               ??? What about code that pulls out the temp and uses it
4168
               elsewhere? I think that such code never uses the TARGET_EXPR as
4169
               an initializer.  If I'm wrong, we'll die because the temp won't
4170
               have any RTL.  In that case, I guess we'll need to replace
4171
               references somehow.  */
4172
            tree init = TARGET_EXPR_INITIAL (*from_p);
4173
 
4174
            if (init
4175
                && !VOID_TYPE_P (TREE_TYPE (init)))
4176
              {
4177
                *from_p = init;
4178
                ret = GS_OK;
4179
                changed = true;
4180
              }
4181
          }
4182
          break;
4183
 
4184
        case COMPOUND_EXPR:
4185
          /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
4186
             caught.  */
4187
          gimplify_compound_expr (from_p, pre_p, true);
4188
          ret = GS_OK;
4189
          changed = true;
4190
          break;
4191
 
4192
        case CONSTRUCTOR:
4193
          /* If we're initializing from a CONSTRUCTOR, break this into
4194
             individual MODIFY_EXPRs.  */
4195
          return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
4196
                                            false);
4197
 
4198
        case COND_EXPR:
4199
          /* If we're assigning to a non-register type, push the assignment
4200
             down into the branches.  This is mandatory for ADDRESSABLE types,
4201
             since we cannot generate temporaries for such, but it saves a
4202
             copy in other cases as well.  */
4203
          if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
4204
            {
4205
              /* This code should mirror the code in gimplify_cond_expr. */
4206
              enum tree_code code = TREE_CODE (*expr_p);
4207
              tree cond = *from_p;
4208
              tree result = *to_p;
4209
 
4210
              ret = gimplify_expr (&result, pre_p, post_p,
4211
                                   is_gimple_lvalue, fb_lvalue);
4212
              if (ret != GS_ERROR)
4213
                ret = GS_OK;
4214
 
4215
              if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
4216
                TREE_OPERAND (cond, 1)
4217
                  = build2 (code, void_type_node, result,
4218
                            TREE_OPERAND (cond, 1));
4219
              if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
4220
                TREE_OPERAND (cond, 2)
4221
                  = build2 (code, void_type_node, unshare_expr (result),
4222
                            TREE_OPERAND (cond, 2));
4223
 
4224
              TREE_TYPE (cond) = void_type_node;
4225
              recalculate_side_effects (cond);
4226
 
4227
              if (want_value)
4228
                {
4229
                  gimplify_and_add (cond, pre_p);
4230
                  *expr_p = unshare_expr (result);
4231
                }
4232
              else
4233
                *expr_p = cond;
4234
              return ret;
4235
            }
4236
          break;
4237
 
4238
        case CALL_EXPR:
4239
          /* For calls that return in memory, give *to_p as the CALL_EXPR's
4240
             return slot so that we don't generate a temporary.  */
4241
          if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
4242
              && aggregate_value_p (*from_p, *from_p))
4243
            {
4244
              bool use_target;
4245
 
4246
              if (!(rhs_predicate_for (*to_p))(*from_p))
4247
                /* If we need a temporary, *to_p isn't accurate.  */
4248
                use_target = false;
4249
              else if (TREE_CODE (*to_p) == RESULT_DECL
4250
                       && DECL_NAME (*to_p) == NULL_TREE
4251
                       && needs_to_live_in_memory (*to_p))
4252
                /* It's OK to use the return slot directly unless it's an NRV. */
4253
                use_target = true;
4254
              else if (is_gimple_reg_type (TREE_TYPE (*to_p))
4255
                       || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
4256
                /* Don't force regs into memory.  */
4257
                use_target = false;
4258
              else if (TREE_CODE (*expr_p) == INIT_EXPR)
4259
                /* It's OK to use the target directly if it's being
4260
                   initialized. */
4261
                use_target = true;
4262
              else if (!is_gimple_non_addressable (*to_p))
4263
                /* Don't use the original target if it's already addressable;
4264
                   if its address escapes, and the called function uses the
4265
                   NRV optimization, a conforming program could see *to_p
4266
                   change before the called function returns; see c++/19317.
4267
                   When optimizing, the return_slot pass marks more functions
4268
                   as safe after we have escape info.  */
4269
                use_target = false;
4270
              else
4271
                use_target = true;
4272
 
4273
              if (use_target)
4274
                {
4275
                  CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
4276
                  mark_addressable (*to_p);
4277
                }
4278
            }
4279
          break;
4280
 
4281
          /* If we're initializing from a container, push the initialization
4282
             inside it.  */
4283
        case CLEANUP_POINT_EXPR:
4284
        case BIND_EXPR:
4285
        case STATEMENT_LIST:
4286
          {
4287
            tree wrap = *from_p;
4288
            tree t;
4289
 
4290
            ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
4291
                                 fb_lvalue);
4292
            if (ret != GS_ERROR)
4293
              ret = GS_OK;
4294
 
4295
            t = voidify_wrapper_expr (wrap, *expr_p);
4296
            gcc_assert (t == *expr_p);
4297
 
4298
            if (want_value)
4299
              {
4300
                gimplify_and_add (wrap, pre_p);
4301
                *expr_p = unshare_expr (*to_p);
4302
              }
4303
            else
4304
              *expr_p = wrap;
4305
            return GS_OK;
4306
          }
4307
 
4308
        case COMPOUND_LITERAL_EXPR:
4309
          {
4310
            tree complit = TREE_OPERAND (*expr_p, 1);
4311
            tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
4312
            tree decl = DECL_EXPR_DECL (decl_s);
4313
            tree init = DECL_INITIAL (decl);
4314
 
4315
            /* struct T x = (struct T) { 0, 1, 2 } can be optimized
4316
               into struct T x = { 0, 1, 2 } if the address of the
4317
               compound literal has never been taken.  */
4318
            if (!TREE_ADDRESSABLE (complit)
4319
                && !TREE_ADDRESSABLE (decl)
4320
                && init)
4321
              {
4322
                *expr_p = copy_node (*expr_p);
4323
                TREE_OPERAND (*expr_p, 1) = init;
4324
                return GS_OK;
4325
              }
4326
          }
4327
 
4328
        default:
4329
          break;
4330
        }
4331
    }
4332
  while (changed);
4333
 
4334
  return ret;
4335
}
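 
/* Illustration (added for exposition, not part of the original source;
   names are invented):  one of the simplifications above pushes an
   assignment of a COND_EXPR to a non-register type down into the
   branches, so that

     big = c ? x : y;      (a struct too large for a register)

   becomes, approximately,

     if (c) big = x; else big = y;

   which avoids materializing a temporary for the selected value.  */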
4336
 
4337
 
4338
/* Promote partial stores to COMPLEX variables to total stores.  *EXPR_P is
4339
   a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
4340
   DECL_GIMPLE_REG_P set.
4341
 
4342
   IMPORTANT NOTE: This promotion is performed by introducing a load of the
4343
   other, unmodified part of the complex object just before the total store.
4344
   As a consequence, if the object is still uninitialized, an undefined value
4345
   will be loaded into a register, which may result in a spurious exception
4346
   if the register is floating-point and the value happens to be a signaling
4347
   NaN for example.  Then the fully-fledged complex operations lowering pass
4348
   followed by a DCE pass are necessary in order to fix things up.  */
4349
 
4350
static enum gimplify_status
4351
gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
4352
                                   bool want_value)
4353
{
4354
  enum tree_code code, ocode;
4355
  tree lhs, rhs, new_rhs, other, realpart, imagpart;
4356
 
4357
  lhs = TREE_OPERAND (*expr_p, 0);
4358
  rhs = TREE_OPERAND (*expr_p, 1);
4359
  code = TREE_CODE (lhs);
4360
  lhs = TREE_OPERAND (lhs, 0);
4361
 
4362
  ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
4363
  other = build1 (ocode, TREE_TYPE (rhs), lhs);
4364
  other = get_formal_tmp_var (other, pre_p);
4365
 
4366
  realpart = code == REALPART_EXPR ? rhs : other;
4367
  imagpart = code == REALPART_EXPR ? other : rhs;
4368
 
4369
  if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
4370
    new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
4371
  else
4372
    new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
4373
 
4374
  gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
4375
  *expr_p = (want_value) ? rhs : NULL_TREE;
4376
 
4377
  return GS_ALL_DONE;
4378
}
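 
/* Illustration (added for exposition, not part of the original source;
   names are invented):  for a complex variable z with DECL_GIMPLE_REG_P
   set, the partial store

     __real__ z = x;

   is promoted by the routine above into a total store:

     D.1 = __imag__ z;
     z = COMPLEX_EXPR <x, D.1>;  */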
4379
 
4380
 
4381
/* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
4382
 
4383
      modify_expr
4384
              : varname '=' rhs
4385
              | '*' ID '=' rhs
4386
 
4387
    PRE_P points to the list where side effects that must happen before
4388
        *EXPR_P should be stored.
4389
 
4390
    POST_P points to the list where side effects that must happen after
4391
        *EXPR_P should be stored.
4392
 
4393
    WANT_VALUE is nonzero iff we want to use the value of this expression
4394
        in another expression.  */
4395
 
4396
static enum gimplify_status
4397
gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4398
                      bool want_value)
4399
{
4400
  tree *from_p = &TREE_OPERAND (*expr_p, 1);
4401
  tree *to_p = &TREE_OPERAND (*expr_p, 0);
4402
  enum gimplify_status ret = GS_UNHANDLED;
4403
  gimple assign;
4404
  location_t loc = EXPR_LOCATION (*expr_p);
4405
 
4406
  gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
4407
              || TREE_CODE (*expr_p) == INIT_EXPR);
4408
 
4409
  /* Insert pointer conversions required by the middle-end that are not
4410
     required by the frontend.  This fixes middle-end type checking for
4411
     cases such as gcc.dg/redecl-6.c.  */
4412
  if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
4413
    {
4414
      STRIP_USELESS_TYPE_CONVERSION (*from_p);
4415
      if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
4416
        *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
4417
    }
4418
 
4419
  /* See if any simplifications can be done based on what the RHS is.  */
4420
  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4421
                                  want_value);
4422
  if (ret != GS_UNHANDLED)
4423
    return ret;
4424
 
4425
  /* For zero sized types only gimplify the left hand side and right hand
4426
     side as statements and throw away the assignment.  Do this after
4427
     gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
4428
     types properly.  */
4429
  if (zero_sized_type (TREE_TYPE (*from_p)) && !want_value)
4430
    {
4431
      gimplify_stmt (from_p, pre_p);
4432
      gimplify_stmt (to_p, pre_p);
4433
      *expr_p = NULL_TREE;
4434
      return GS_ALL_DONE;
4435
    }
4436
 
4437
  /* If the value being copied is of variable width, compute the length
4438
     of the copy into a WITH_SIZE_EXPR.   Note that we need to do this
4439
     before gimplifying any of the operands so that we can resolve any
4440
     PLACEHOLDER_EXPRs in the size.  Also note that the RTL expander uses
4441
     the size of the expression to be copied, not of the destination, so
4442
     that is what we must do here.  */
4443
  maybe_with_size_expr (from_p);
4444
 
4445
  ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
4446
  if (ret == GS_ERROR)
4447
    return ret;
4448
 
4449
  /* As a special case, we have to temporarily allow for assignments
4450
     with a CALL_EXPR on the RHS.  Since in GIMPLE a function call is
4451
     a toplevel statement, when gimplifying the GENERIC expression
4452
     MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
4453
     GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
4454
 
4455
     Instead, we need to create the tuple GIMPLE_CALL <a, foo>.  To
4456
     prevent gimplify_expr from trying to create a new temporary for
4457
     foo's LHS, we tell it that it should only gimplify until it
4458
     reaches the CALL_EXPR.  On return from gimplify_expr, the newly
4459
     created GIMPLE_CALL <foo> will be the last statement in *PRE_P
4460
     and all we need to do here is set 'a' to be its LHS.  */
4461
  ret = gimplify_expr (from_p, pre_p, post_p, rhs_predicate_for (*to_p),
4462
                       fb_rvalue);
4463
  if (ret == GS_ERROR)
4464
    return ret;
4465
 
4466
  /* Now see if the above changed *from_p to something we handle specially.  */
4467
  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4468
                                  want_value);
4469
  if (ret != GS_UNHANDLED)
4470
    return ret;
4471
 
4472
  /* If we've got a variable-sized assignment between two lvalues (i.e. one
4473
     that does not involve a call), then we can make things a bit more
4474
     straightforward by converting the assignment to memcpy or memset.  */
4475
  if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
4476
    {
4477
      tree from = TREE_OPERAND (*from_p, 0);
4478
      tree size = TREE_OPERAND (*from_p, 1);
4479
 
4480
      if (TREE_CODE (from) == CONSTRUCTOR)
4481
        return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
4482
 
4483
      if (is_gimple_addressable (from))
4484
        {
4485
          *from_p = from;
4486
          return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
4487
                                                 pre_p);
4488
        }
4489
    }
4490
 
4491
  /* Transform partial stores to non-addressable complex variables into
4492
     total stores.  This allows us to use real instead of virtual operands
4493
     for these variables, which improves optimization.  */
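  /* Illustrative sketch only (the names z, x and tmp are hypothetical):
     assuming a complex variable z that lives in a register, a GENERIC
     partial store such as
       __real__ z = x;
     is rewritten by gimplify_modify_expr_complex_part into roughly
       tmp = __imag__ z;
       z = COMPLEX_EXPR <x, tmp>;
     so that the whole of z is written by a single assignment.  */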
4494
  if ((TREE_CODE (*to_p) == REALPART_EXPR
4495
       || TREE_CODE (*to_p) == IMAGPART_EXPR)
4496
      && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
4497
    return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
4498
 
4499
  /* Try to alleviate the effects of the gimplification creating artificial
4500
     temporaries (see for example is_gimple_reg_rhs) on the debug info.  */
4501
  if (!gimplify_ctxp->into_ssa
4502
      && DECL_P (*from_p)
4503
      && DECL_IGNORED_P (*from_p)
4504
      && DECL_P (*to_p)
4505
      && !DECL_IGNORED_P (*to_p))
4506
    {
4507
      if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
4508
        DECL_NAME (*from_p)
4509
          = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
4510
      DECL_DEBUG_EXPR_IS_FROM (*from_p) = 1;
4511
      SET_DECL_DEBUG_EXPR (*from_p, *to_p);
4512
   }
4513
 
4514
  if (TREE_CODE (*from_p) == CALL_EXPR)
4515
    {
4516
      /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
4517
         instead of a GIMPLE_ASSIGN.  */
4518
      assign = gimple_build_call_from_tree (*from_p);
4519
      if (!gimple_call_noreturn_p (assign))
4520
        gimple_call_set_lhs (assign, *to_p);
4521
    }
4522
  else
4523
    {
4524
      assign = gimple_build_assign (*to_p, *from_p);
4525
      gimple_set_location (assign, EXPR_LOCATION (*expr_p));
4526
    }
4527
 
4528
  gimplify_seq_add_stmt (pre_p, assign);
4529
 
4530
  if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
4531
    {
4532
      /* If we've somehow already got an SSA_NAME on the LHS, then
4533
         we've probably modified it twice.  Not good.  */
4534
      gcc_assert (TREE_CODE (*to_p) != SSA_NAME);
4535
      *to_p = make_ssa_name (*to_p, assign);
4536
      gimple_set_lhs (assign, *to_p);
4537
    }
4538
 
4539
  if (want_value)
4540
    {
4541
      *expr_p = unshare_expr (*to_p);
4542
      return GS_OK;
4543
    }
4544
  else
4545
    *expr_p = NULL;
4546
 
4547
  return GS_ALL_DONE;
4548
}
4549
 
4550
/*  Gimplify a comparison between two variable-sized objects.  Do this
4551
    with a call to BUILT_IN_MEMCMP.  */
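/* For instance (an illustrative sketch only): for two objects A and B of the
   same variable-sized type, "A == B" is rewritten below as roughly
     __builtin_memcmp (&A, &B, <size of A's type>) == 0
   where the size expression comes from TYPE_SIZE_UNIT and any
   PLACEHOLDER_EXPRs in it are substituted from A.  */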
4552
 
4553
static enum gimplify_status
4554
gimplify_variable_sized_compare (tree *expr_p)
4555
{
4556
  tree op0 = TREE_OPERAND (*expr_p, 0);
4557
  tree op1 = TREE_OPERAND (*expr_p, 1);
4558
  tree t, arg, dest, src;
4559
  location_t loc = EXPR_LOCATION (*expr_p);
4560
 
4561
  arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
4562
  arg = unshare_expr (arg);
4563
  arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
4564
  src = build_fold_addr_expr_loc (loc, op1);
4565
  dest = build_fold_addr_expr_loc (loc, op0);
4566
  t = implicit_built_in_decls[BUILT_IN_MEMCMP];
4567
  t = build_call_expr_loc (loc, t, 3, dest, src, arg);
4568
  *expr_p
4569
    = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
4570
 
4571
  return GS_OK;
4572
}
4573
 
4574
/*  Gimplify a comparison between two aggregate objects of integral scalar
4575
    mode as a comparison between the bitwise equivalent scalar values.  */
4576
 
4577
static enum gimplify_status
4578
gimplify_scalar_mode_aggregate_compare (tree *expr_p)
4579
{
4580
  location_t loc = EXPR_LOCATION (*expr_p);
4581
  tree op0 = TREE_OPERAND (*expr_p, 0);
4582
  tree op1 = TREE_OPERAND (*expr_p, 1);
4583
 
4584
  tree type = TREE_TYPE (op0);
4585
  tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
4586
 
4587
  op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
4588
  op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
4589
 
4590
  *expr_p
4591
    = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
4592
 
4593
  return GS_OK;
4594
}
4595
 
4596
/*  Gimplify TRUTH_ANDIF_EXPR and TRUTH_ORIF_EXPR expressions.  EXPR_P
4597
    points to the expression to gimplify.
4598
 
4599
    Expressions of the form 'a && b' are gimplified to:
4600
 
4601
        a && b ? true : false
4602
 
4603
    LOCUS is the source location to be put on the generated COND_EXPR.
4604
    gimplify_cond_expr will do the rest.  */
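/* As a rough illustrative sketch (label names are for exposition only):
   once gimplify_cond_expr has processed the COND_EXPR built here, a
   statement such as "x = a && b" ends up in GIMPLE as approximately

     if (a) goto L1; else goto L3;
     L1: if (b) goto L2; else goto L3;
     L2: x = 1; goto L4;
     L3: x = 0;
     L4: ...                                                             */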
4605
 
4606
static enum gimplify_status
4607
gimplify_boolean_expr (tree *expr_p, location_t locus)
4608
{
4609
  /* Preserve the original type of the expression.  */
4610
  tree type = TREE_TYPE (*expr_p);
4611
 
4612
  *expr_p = build3 (COND_EXPR, type, *expr_p,
4613
                    fold_convert_loc (locus, type, boolean_true_node),
4614
                    fold_convert_loc (locus, type, boolean_false_node));
4615
 
4616
  SET_EXPR_LOCATION (*expr_p, locus);
4617
 
4618
  return GS_OK;
4619
}
4620
 
4621
/* Gimplifies an expression sequence.  This function gimplifies each
4622
   expression and re-writes the original expression with the last
4623
   expression of the sequence in GIMPLE form.
4624
 
4625
   PRE_P points to the list where the side effects for all the
4626
       expressions in the sequence will be emitted.
4627
 
4628
   WANT_VALUE is true when the result of the last COMPOUND_EXPR is used.  */
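/* For example (sketch only): for the GENERIC expression "(f (), g (), x)",
   the calls to f and g are emitted to *PRE_P as separate statements and
   *EXPR_P is rewritten to just "x"; if WANT_VALUE is false, "x" itself is
   then gimplified as a statement as well.  */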
4629
 
4630
static enum gimplify_status
4631
gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
4632
{
4633
  tree t = *expr_p;
4634
 
4635
  do
4636
    {
4637
      tree *sub_p = &TREE_OPERAND (t, 0);
4638
 
4639
      if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
4640
        gimplify_compound_expr (sub_p, pre_p, false);
4641
      else
4642
        gimplify_stmt (sub_p, pre_p);
4643
 
4644
      t = TREE_OPERAND (t, 1);
4645
    }
4646
  while (TREE_CODE (t) == COMPOUND_EXPR);
4647
 
4648
  *expr_p = t;
4649
  if (want_value)
4650
    return GS_OK;
4651
  else
4652
    {
4653
      gimplify_stmt (expr_p, pre_p);
4654
      return GS_ALL_DONE;
4655
    }
4656
}
4657
 
4658
 
4659
/* Gimplify a SAVE_EXPR node.  EXPR_P points to the expression to
4660
   gimplify.  After gimplification, EXPR_P will point to a new temporary
4661
   that holds the original value of the SAVE_EXPR node.
4662
 
4663
   PRE_P points to the list where side effects that must happen before
4664
      *EXPR_P should be stored.  */
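/* A small sketch (tmp and n are illustrative names only): the first time
   SAVE_EXPR <n * 4> is gimplified, a statement such as "tmp = n * 4" is
   emitted to *PRE_P, the operand is replaced by tmp and the node is marked
   resolved; later gimplifications of the same node simply yield tmp again.  */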
4665
 
4666
static enum gimplify_status
4667
gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4668
{
4669
  enum gimplify_status ret = GS_ALL_DONE;
4670
  tree val;
4671
 
4672
  gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
4673
  val = TREE_OPERAND (*expr_p, 0);
4674
 
4675
  /* If the SAVE_EXPR has not been resolved, then evaluate it once.  */
4676
  if (!SAVE_EXPR_RESOLVED_P (*expr_p))
4677
    {
4678
      /* The operand may be a void-valued expression such as SAVE_EXPRs
4679
         generated by the Java frontend for class initialization.  It is
4680
         being executed only for its side-effects.  */
4681
      if (TREE_TYPE (val) == void_type_node)
4682
        {
4683
          ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
4684
                               is_gimple_stmt, fb_none);
4685
          val = NULL;
4686
        }
4687
      else
4688
        val = get_initialized_tmp_var (val, pre_p, post_p);
4689
 
4690
      TREE_OPERAND (*expr_p, 0) = val;
4691
      SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
4692
    }
4693
 
4694
  *expr_p = val;
4695
 
4696
  return ret;
4697
}
4698
 
4699
/*  Re-write the ADDR_EXPR node pointed to by EXPR_P
4700
 
4701
      unary_expr
4702
              : ...
4703
              | '&' varname
4704
              ...
4705
 
4706
    PRE_P points to the list where side effects that must happen before
4707
        *EXPR_P should be stored.
4708
 
4709
    POST_P points to the list where side effects that must happen after
4710
        *EXPR_P should be stored.  */
4711
 
4712
static enum gimplify_status
4713
gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4714
{
4715
  tree expr = *expr_p;
4716
  tree op0 = TREE_OPERAND (expr, 0);
4717
  enum gimplify_status ret;
4718
  location_t loc = EXPR_LOCATION (*expr_p);
4719
 
4720
  switch (TREE_CODE (op0))
4721
    {
4722
    case INDIRECT_REF:
4723
    case MISALIGNED_INDIRECT_REF:
4724
    do_indirect_ref:
4725
      /* Check if we are dealing with an expression of the form '&*ptr'.
4726
         While the front end folds away '&*ptr' into 'ptr', these
4727
         expressions may be generated internally by the compiler (e.g.,
4728
         builtins like __builtin_va_end).  */
4729
      /* Caution: the silent array decomposition semantics we allow for
4730
         ADDR_EXPR means we can't always discard the pair.  */
4731
      /* Gimplification of the ADDR_EXPR operand may drop
4732
         cv-qualification conversions, so make sure we add them if
4733
         needed.  */
4734
      {
4735
        tree op00 = TREE_OPERAND (op0, 0);
4736
        tree t_expr = TREE_TYPE (expr);
4737
        tree t_op00 = TREE_TYPE (op00);
4738
 
4739
        if (!useless_type_conversion_p (t_expr, t_op00))
4740
          op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
4741
        *expr_p = op00;
4742
        ret = GS_OK;
4743
      }
4744
      break;
4745
 
4746
    case VIEW_CONVERT_EXPR:
4747
      /* Take the address of our operand and then convert it to the type of
4748
         this ADDR_EXPR.
4749
 
4750
         ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
4751
         all clear.  The impact of this transformation is even less clear.  */
4752
 
4753
      /* If the operand is a useless conversion, look through it.  Doing so
4754
         guarantees that the ADDR_EXPR and its operand will remain of the
4755
         same type.  */
4756
      if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
4757
        op0 = TREE_OPERAND (op0, 0);
4758
 
4759
      *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
4760
                                  build_fold_addr_expr_loc (loc,
4761
                                                        TREE_OPERAND (op0, 0)));
4762
      ret = GS_OK;
4763
      break;
4764
 
4765
    default:
4766
      /* We use fb_either here because the C frontend sometimes takes
4767
         the address of a call that returns a struct; see
4768
         gcc.dg/c99-array-lval-1.c.  The gimplifier will correctly make
4769
         the implied temporary explicit.  */
4770
 
4771
      /* Make the operand addressable.  */
4772
      ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
4773
                           is_gimple_addressable, fb_either);
4774
      if (ret == GS_ERROR)
4775
        break;
4776
 
4777
      /* Then mark it.  Beware that it may not be possible to do so directly
4778
         if a temporary has been created by the gimplification.  */
4779
      prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
4780
 
4781
      op0 = TREE_OPERAND (expr, 0);
4782
 
4783
      /* For various reasons, the gimplification of the expression
4784
         may have made a new INDIRECT_REF.  */
4785
      if (TREE_CODE (op0) == INDIRECT_REF)
4786
        goto do_indirect_ref;
4787
 
4788
      mark_addressable (TREE_OPERAND (expr, 0));
4789
 
4790
      /* The FEs may end up building ADDR_EXPRs early on a decl with
4791
         an incomplete type.  Re-build ADDR_EXPRs in canonical form
4792
         here.  */
4793
      if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
4794
        *expr_p = build_fold_addr_expr (op0);
4795
 
4796
      /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly.  */
4797
      recompute_tree_invariant_for_addr_expr (*expr_p);
4798
 
4799
      /* If we re-built the ADDR_EXPR add a conversion to the original type
4800
         if required.  */
4801
      if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
4802
        *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
4803
 
4804
      break;
4805
    }
4806
 
4807
  return ret;
4808
}
4809
 
4810
/* Gimplify the operands of an ASM_EXPR.  Input operands should be a gimple
4811
   value; output operands should be a gimple lvalue.  */
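/* One illustrative case: an in/out operand such as

     asm ("..." : "+r" (x));

   is split below into a separate "=r" output and a matching "0" input for x,
   which gives the optimizers independent input and output operands.  */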
4812
 
4813
static enum gimplify_status
4814
gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4815
{
4816
  tree expr;
4817
  int noutputs;
4818
  const char **oconstraints;
4819
  int i;
4820
  tree link;
4821
  const char *constraint;
4822
  bool allows_mem, allows_reg, is_inout;
4823
  enum gimplify_status ret, tret;
4824
  gimple stmt;
4825
  VEC(tree, gc) *inputs;
4826
  VEC(tree, gc) *outputs;
4827
  VEC(tree, gc) *clobbers;
4828
  VEC(tree, gc) *labels;
4829
  tree link_next;
4830
 
4831
  expr = *expr_p;
4832
  noutputs = list_length (ASM_OUTPUTS (expr));
4833
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
4834
 
4835
  inputs = outputs = clobbers = labels = NULL;
4836
 
4837
  ret = GS_ALL_DONE;
4838
  link_next = NULL_TREE;
4839
  for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
4840
    {
4841
      bool ok;
4842
      size_t constraint_len;
4843
 
4844
      link_next = TREE_CHAIN (link);
4845
 
4846
      oconstraints[i]
4847
        = constraint
4848
        = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4849
      constraint_len = strlen (constraint);
4850
      if (constraint_len == 0)
4851
        continue;
4852
 
4853
      ok = parse_output_constraint (&constraint, i, 0, 0,
4854
                                    &allows_mem, &allows_reg, &is_inout);
4855
      if (!ok)
4856
        {
4857
          ret = GS_ERROR;
4858
          is_inout = false;
4859
        }
4860
 
4861
      if (!allows_reg && allows_mem)
4862
        mark_addressable (TREE_VALUE (link));
4863
 
4864
      tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
4865
                            is_inout ? is_gimple_min_lval : is_gimple_lvalue,
4866
                            fb_lvalue | fb_mayfail);
4867
      if (tret == GS_ERROR)
4868
        {
4869
          error ("invalid lvalue in asm output %d", i);
4870
          ret = tret;
4871
        }
4872
 
4873
      VEC_safe_push (tree, gc, outputs, link);
4874
      TREE_CHAIN (link) = NULL_TREE;
4875
 
4876
      if (is_inout)
4877
        {
4878
          /* An input/output operand.  To give the optimizers more
4879
             flexibility, split it into separate input and output
4880
             operands.  */
4881
          tree input;
4882
          char buf[10];
4883
 
4884
          /* Turn the in/out constraint into an output constraint.  */
4885
          char *p = xstrdup (constraint);
4886
          p[0] = '=';
4887
          TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
4888
 
4889
          /* And add a matching input constraint.  */
4890
          if (allows_reg)
4891
            {
4892
              sprintf (buf, "%d", i);
4893
 
4894
              /* If there are multiple alternatives in the constraint,
4895
                 handle each of them individually.  Those that allow a
4896
                 register will be replaced with the operand number; the
4897
                 others will stay unchanged.  */
4898
              if (strchr (p, ',') != NULL)
4899
                {
4900
                  size_t len = 0, buflen = strlen (buf);
4901
                  char *beg, *end, *str, *dst;
4902
 
4903
                  for (beg = p + 1;;)
4904
                    {
4905
                      end = strchr (beg, ',');
4906
                      if (end == NULL)
4907
                        end = strchr (beg, '\0');
4908
                      if ((size_t) (end - beg) < buflen)
4909
                        len += buflen + 1;
4910
                      else
4911
                        len += end - beg + 1;
4912
                      if (*end)
4913
                        beg = end + 1;
4914
                      else
4915
                        break;
4916
                    }
4917
 
4918
                  str = (char *) alloca (len);
4919
                  for (beg = p + 1, dst = str;;)
4920
                    {
4921
                      const char *tem;
4922
                      bool mem_p, reg_p, inout_p;
4923
 
4924
                      end = strchr (beg, ',');
4925
                      if (end)
4926
                        *end = '\0';
4927
                      beg[-1] = '=';
4928
                      tem = beg - 1;
4929
                      parse_output_constraint (&tem, i, 0, 0,
4930
                                               &mem_p, &reg_p, &inout_p);
4931
                      if (dst != str)
4932
                        *dst++ = ',';
4933
                      if (reg_p)
4934
                        {
4935
                          memcpy (dst, buf, buflen);
4936
                          dst += buflen;
4937
                        }
4938
                      else
4939
                        {
4940
                          if (end)
4941
                            len = end - beg;
4942
                          else
4943
                            len = strlen (beg);
4944
                          memcpy (dst, beg, len);
4945
                          dst += len;
4946
                        }
4947
                      if (end)
4948
                        beg = end + 1;
4949
                      else
4950
                        break;
4951
                    }
4952
                  *dst = '\0';
4953
                  input = build_string (dst - str, str);
4954
                }
4955
              else
4956
                input = build_string (strlen (buf), buf);
4957
            }
4958
          else
4959
            input = build_string (constraint_len - 1, constraint + 1);
4960
 
4961
          free (p);
4962
 
4963
          input = build_tree_list (build_tree_list (NULL_TREE, input),
4964
                                   unshare_expr (TREE_VALUE (link)));
4965
          ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
4966
        }
4967
    }
4968
 
4969
  link_next = NULL_TREE;
4970
  for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
4971
    {
4972
      link_next = TREE_CHAIN (link);
4973
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4974
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
4975
                              oconstraints, &allows_mem, &allows_reg);
4976
 
4977
      /* If we can't make copies, we can only accept memory.  */
4978
      if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
4979
        {
4980
          if (allows_mem)
4981
            allows_reg = 0;
4982
          else
4983
            {
4984
              error ("impossible constraint in %<asm%>");
4985
              error ("non-memory input %d must stay in memory", i);
4986
              return GS_ERROR;
4987
            }
4988
        }
4989
 
4990
      /* If the operand is a memory input, it should be an lvalue.  */
4991
      if (!allows_reg && allows_mem)
4992
        {
4993
          tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
4994
                                is_gimple_lvalue, fb_lvalue | fb_mayfail);
4995
          mark_addressable (TREE_VALUE (link));
4996
          if (tret == GS_ERROR)
4997
            {
4998
              if (EXPR_HAS_LOCATION (TREE_VALUE (link)))
4999
                input_location = EXPR_LOCATION (TREE_VALUE (link));
5000
              error ("memory input %d is not directly addressable", i);
5001
              ret = tret;
5002
            }
5003
        }
5004
      else
5005
        {
5006
          tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5007
                                is_gimple_asm_val, fb_rvalue);
5008
          if (tret == GS_ERROR)
5009
            ret = tret;
5010
        }
5011
 
5012
      TREE_CHAIN (link) = NULL_TREE;
5013
      VEC_safe_push (tree, gc, inputs, link);
5014
    }
5015
 
5016
  for (link = ASM_CLOBBERS (expr); link; ++i, link = TREE_CHAIN (link))
5017
    VEC_safe_push (tree, gc, clobbers, link);
5018
 
5019
  for (link = ASM_LABELS (expr); link; ++i, link = TREE_CHAIN (link))
5020
    VEC_safe_push (tree, gc, labels, link);
5021
 
5022
  /* Do not add ASMs with errors to the gimple IL stream.  */
5023
  if (ret != GS_ERROR)
5024
    {
5025
      stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
5026
                                   inputs, outputs, clobbers, labels);
5027
 
5028
      gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr));
5029
      gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
5030
 
5031
      gimplify_seq_add_stmt (pre_p, stmt);
5032
    }
5033
 
5034
  return ret;
5035
}
5036
 
5037
/* Gimplify a CLEANUP_POINT_EXPR.  Currently this works by adding
5038
   GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
5039
   gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
5040
   return to this function.
5041
 
5042
   FIXME should we complexify the prequeue handling instead?  Or use flags
5043
   for all the cleanups and let the optimizer tighten them up?  The current
5044
   code seems pretty fragile; it will break on a cleanup within any
5045
   non-conditional nesting.  But any such nesting would be broken, anyway;
5046
   we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
5047
   and continues out of it.  We can do that at the RTL level, though, so
5048
   having an optimizer to tighten up try/finally regions would be a Good
5049
   Thing.  */
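/* As a rough sketch, a gimplified body of the form

     stmt1; WCE <cleanup>; stmt2; stmt3;

   is rewritten below as

     stmt1; try { stmt2; stmt3; } finally { cleanup; }

   with GIMPLE_TRY_CATCH used instead of GIMPLE_TRY_FINALLY when the cleanup
   is flagged as EH-only, and with a cleanup that is the last statement simply
   spliced in place of its WCE.  */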
5050
 
5051
static enum gimplify_status
5052
gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
5053
{
5054
  gimple_stmt_iterator iter;
5055
  gimple_seq body_sequence = NULL;
5056
 
5057
  tree temp = voidify_wrapper_expr (*expr_p, NULL);
5058
 
5059
  /* We only care about the number of conditions between the innermost
5060
     CLEANUP_POINT_EXPR and the cleanup.  So save and reset the count and
5061
     any cleanups collected outside the CLEANUP_POINT_EXPR.  */
5062
  int old_conds = gimplify_ctxp->conditions;
5063
  gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
5064
  gimplify_ctxp->conditions = 0;
5065
  gimplify_ctxp->conditional_cleanups = NULL;
5066
 
5067
  gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
5068
 
5069
  gimplify_ctxp->conditions = old_conds;
5070
  gimplify_ctxp->conditional_cleanups = old_cleanups;
5071
 
5072
  for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
5073
    {
5074
      gimple wce = gsi_stmt (iter);
5075
 
5076
      if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
5077
        {
5078
          if (gsi_one_before_end_p (iter))
5079
            {
5080
              /* Note that gsi_insert_seq_before and gsi_remove do not
5081
                 scan operands, unlike some other sequence mutators.  */
5082
              gsi_insert_seq_before_without_update (&iter,
5083
                                                    gimple_wce_cleanup (wce),
5084
                                                    GSI_SAME_STMT);
5085
              gsi_remove (&iter, true);
5086
              break;
5087
            }
5088
          else
5089
            {
5090
              gimple gtry;
5091
              gimple_seq seq;
5092
              enum gimple_try_flags kind;
5093
 
5094
              if (gimple_wce_cleanup_eh_only (wce))
5095
                kind = GIMPLE_TRY_CATCH;
5096
              else
5097
                kind = GIMPLE_TRY_FINALLY;
5098
              seq = gsi_split_seq_after (iter);
5099
 
5100
              gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
5101
              /* Do not use gsi_replace here, as it may scan operands.
5102
                 We want to do a simple structural modification only.  */
5103
              *gsi_stmt_ptr (&iter) = gtry;
5104
              iter = gsi_start (seq);
5105
            }
5106
        }
5107
      else
5108
        gsi_next (&iter);
5109
    }
5110
 
5111
  gimplify_seq_add_seq (pre_p, body_sequence);
5112
  if (temp)
5113
    {
5114
      *expr_p = temp;
5115
      return GS_OK;
5116
    }
5117
  else
5118
    {
5119
      *expr_p = NULL;
5120
      return GS_ALL_DONE;
5121
    }
5122
}
5123
 
5124
/* Insert a cleanup marker for gimplify_cleanup_point_expr.  CLEANUP
5125
   is the cleanup action required.  EH_ONLY is true if the cleanup should
5126
   only be executed if an exception is thrown, not on normal exit.  */
5127
 
5128
static void
5129
gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p)
5130
{
5131
  gimple wce;
5132
  gimple_seq cleanup_stmts = NULL;
5133
 
5134
  /* Errors can result in improperly nested cleanups.  Which results in
5135
     confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR.  */
5136
  if (errorcount || sorrycount)
5137
    return;
5138
 
5139
  if (gimple_conditional_context ())
5140
    {
5141
      /* If we're in a conditional context, this is more complex.  We only
5142
         want to run the cleanup if we actually ran the initialization that
5143
         necessitates it, but we want to run it after the end of the
5144
         conditional context.  So we wrap the try/finally around the
5145
         condition and use a flag to determine whether or not to actually
5146
         run the destructor.  Thus
5147
 
5148
           test ? f(A()) : 0
5149
 
5150
         becomes (approximately)
5151
 
5152
           flag = 0;
5153
           try {
5154
             if (test) { A::A(temp); flag = 1; val = f(temp); }
5155
             else { val = 0; }
5156
           } finally {
5157
             if (flag) A::~A(temp);
5158
           }
5159
           val
5160
      */
5161
      tree flag = create_tmp_var (boolean_type_node, "cleanup");
5162
      gimple ffalse = gimple_build_assign (flag, boolean_false_node);
5163
      gimple ftrue = gimple_build_assign (flag, boolean_true_node);
5164
 
5165
      cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
5166
      gimplify_stmt (&cleanup, &cleanup_stmts);
5167
      wce = gimple_build_wce (cleanup_stmts);
5168
 
5169
      gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
5170
      gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
5171
      gimplify_seq_add_stmt (pre_p, ftrue);
5172
 
5173
      /* Because of this manipulation, and the EH edges that jump
5174
         threading cannot redirect, the temporary (VAR) will appear
5175
         to be used uninitialized.  Don't warn.  */
5176
      TREE_NO_WARNING (var) = 1;
5177
    }
5178
  else
5179
    {
5180
      gimplify_stmt (&cleanup, &cleanup_stmts);
5181
      wce = gimple_build_wce (cleanup_stmts);
5182
      gimple_wce_set_cleanup_eh_only (wce, eh_only);
5183
      gimplify_seq_add_stmt (pre_p, wce);
5184
    }
5185
}
5186
 
5187
/* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR.  */
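/* Sketch of the common case (D.1234 is just an illustrative slot name): for
   TARGET_EXPR <D.1234, init, cleanup>, the slot D.1234 is registered as a
   local temporary, "D.1234 = init" is gimplified into *PRE_P, any cleanup is
   pushed via gimple_push_cleanup, and *EXPR_P is replaced by D.1234.  */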
5188
 
5189
static enum gimplify_status
5190
gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5191
{
5192
  tree targ = *expr_p;
5193
  tree temp = TARGET_EXPR_SLOT (targ);
5194
  tree init = TARGET_EXPR_INITIAL (targ);
5195
  enum gimplify_status ret;
5196
 
5197
  if (init)
5198
    {
5199
      /* TARGET_EXPR temps aren't part of the enclosing block, so add them
5200
         to the temps list.  Also handle variable-length TARGET_EXPRs.  */
5201
      if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
5202
        {
5203
          if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
5204
            gimplify_type_sizes (TREE_TYPE (temp), pre_p);
5205
          gimplify_vla_decl (temp, pre_p);
5206
        }
5207
      else
5208
        gimple_add_tmp_var (temp);
5209
 
5210
      /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
5211
         expression is supposed to initialize the slot.  */
5212
      if (VOID_TYPE_P (TREE_TYPE (init)))
5213
        ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5214
      else
5215
        {
5216
          tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
5217
          init = init_expr;
5218
          ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5219
          init = NULL;
5220
          ggc_free (init_expr);
5221
        }
5222
      if (ret == GS_ERROR)
5223
        {
5224
          /* PR c++/28266 Make sure this is expanded only once. */
5225
          TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5226
          return GS_ERROR;
5227
        }
5228
      if (init)
5229
        gimplify_and_add (init, pre_p);
5230
 
5231
      /* If needed, push the cleanup for the temp.  */
5232
      if (TARGET_EXPR_CLEANUP (targ))
5233
        gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
5234
                             CLEANUP_EH_ONLY (targ), pre_p);
5235
 
5236
      /* Only expand this once.  */
5237
      TREE_OPERAND (targ, 3) = init;
5238
      TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5239
    }
5240
  else
5241
    /* We should have expanded this before.  */
5242
    gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
5243
 
5244
  *expr_p = temp;
5245
  return GS_OK;
5246
}
5247
 
5248
/* Gimplification of expression trees.  */
5249
 
5250
/* Gimplify an expression which appears at statement context.  The
5251
   corresponding GIMPLE statements are added to *SEQ_P.  If *SEQ_P is
5252
   NULL, a new sequence is allocated.
5253
 
5254
   Return true if we actually added a statement to the queue.  */
5255
 
5256
bool
5257
gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
5258
{
5259
  gimple_seq_node last;
5260
 
5261
  if (!*seq_p)
5262
    *seq_p = gimple_seq_alloc ();
5263
 
5264
  last = gimple_seq_last (*seq_p);
5265
  gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
5266
  return last != gimple_seq_last (*seq_p);
5267
}
5268
 
5269
 
5270
/* Add FIRSTPRIVATE entries for DECL to CTX and the surrounding OpenMP
5271
   parallel contexts.  If entries already exist, force them to be some
5272
   flavor of private.  If there is no enclosing parallel, do nothing.  */
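/* For example (an illustrative sketch): when a variable-length array
   "int a[n]" is privatized in a parallel region, this is applied to the size
   expressions derived from n, so that each enclosing parallel receives the
   size (or the temporary holding it) as firstprivate.  */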
5273
 
5274
void
5275
omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
5276
{
5277
  splay_tree_node n;
5278
 
5279
  if (decl == NULL || !DECL_P (decl))
5280
    return;
5281
 
5282
  do
5283
    {
5284
      n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5285
      if (n != NULL)
5286
        {
5287
          if (n->value & GOVD_SHARED)
5288
            n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
5289
          else
5290
            return;
5291
        }
5292
      else if (ctx->region_type != ORT_WORKSHARE)
5293
        omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
5294
 
5295
      ctx = ctx->outer_context;
5296
    }
5297
  while (ctx);
5298
}
5299
 
5300
/* Similarly for each of the type sizes of TYPE.  */
5301
 
5302
static void
5303
omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
5304
{
5305
  if (type == NULL || type == error_mark_node)
5306
    return;
5307
  type = TYPE_MAIN_VARIANT (type);
5308
 
5309
  if (pointer_set_insert (ctx->privatized_types, type))
5310
    return;
5311
 
5312
  switch (TREE_CODE (type))
5313
    {
5314
    case INTEGER_TYPE:
5315
    case ENUMERAL_TYPE:
5316
    case BOOLEAN_TYPE:
5317
    case REAL_TYPE:
5318
    case FIXED_POINT_TYPE:
5319
      omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
5320
      omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
5321
      break;
5322
 
5323
    case ARRAY_TYPE:
5324
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5325
      omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
5326
      break;
5327
 
5328
    case RECORD_TYPE:
5329
    case UNION_TYPE:
5330
    case QUAL_UNION_TYPE:
5331
      {
5332
        tree field;
5333
        for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
5334
          if (TREE_CODE (field) == FIELD_DECL)
5335
            {
5336
              omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
5337
              omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
5338
            }
5339
      }
5340
      break;
5341
 
5342
    case POINTER_TYPE:
5343
    case REFERENCE_TYPE:
5344
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5345
      break;
5346
 
5347
    default:
5348
      break;
5349
    }
5350
 
5351
  omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
5352
  omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
5353
  lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
5354
}
5355
 
5356
/* Add an entry for DECL in the OpenMP context CTX with FLAGS.  */
5357
 
5358
static void
5359
omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
5360
{
5361
  splay_tree_node n;
5362
  unsigned int nflags;
5363
  tree t;
5364
 
5365
  if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
5366
    return;
5367
 
5368
  /* Never elide decls whose type has TREE_ADDRESSABLE set.  This means
5369
     there are constructors involved somewhere.  */
5370
  if (TREE_ADDRESSABLE (TREE_TYPE (decl))
5371
      || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
5372
    flags |= GOVD_SEEN;
5373
 
5374
  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5375
  if (n != NULL)
5376
    {
5377
      /* We shouldn't be re-adding the decl with the same data
5378
         sharing class.  */
5379
      gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
5380
      /* The only combination of data sharing classes we should see is
5381
         FIRSTPRIVATE and LASTPRIVATE.  */
5382
      nflags = n->value | flags;
5383
      gcc_assert ((nflags & GOVD_DATA_SHARE_CLASS)
5384
                  == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE));
5385
      n->value = nflags;
5386
      return;
5387
    }
5388
 
5389
  /* When adding a variable-sized variable, we have to handle all sorts
5390
     of additional bits of data: the pointer replacement variable, and
5391
     the parameters of the type.  */
5392
  if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
5393
    {
5394
      /* Add the pointer replacement variable as PRIVATE if the variable
5395
         replacement is private, else FIRSTPRIVATE since we'll need the
5396
         address of the original variable either for SHARED, or for the
5397
         copy into or out of the context.  */
5398
      if (!(flags & GOVD_LOCAL))
5399
        {
5400
          nflags = flags & GOVD_PRIVATE ? GOVD_PRIVATE : GOVD_FIRSTPRIVATE;
5401
          nflags |= flags & GOVD_SEEN;
5402
          t = DECL_VALUE_EXPR (decl);
5403
          gcc_assert (TREE_CODE (t) == INDIRECT_REF);
5404
          t = TREE_OPERAND (t, 0);
5405
          gcc_assert (DECL_P (t));
5406
          omp_add_variable (ctx, t, nflags);
5407
        }
5408
 
5409
      /* Add all of the variable and type parameters (which should have
5410
         been gimplified to a formal temporary) as FIRSTPRIVATE.  */
5411
      omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
5412
      omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
5413
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5414
 
5415
      /* The variable-sized variable itself is never SHARED, only some form
5416
         of PRIVATE.  The sharing would take place via the pointer variable
5417
         which we remapped above.  */
5418
      if (flags & GOVD_SHARED)
5419
        flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE
5420
                | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
5421
 
5422
      /* We're going to make use of the TYPE_SIZE_UNIT at least in the
5423
         alloca statement we generate for the variable, so make sure it
5424
         is available.  This isn't automatically needed for the SHARED
5425
         case, since we won't be allocating local storage then.
5426
         For local variables TYPE_SIZE_UNIT might not be gimplified yet,
5427
         in this case omp_notice_variable will be called later
5428
         on when it is gimplified.  */
5429
      else if (! (flags & GOVD_LOCAL))
5430
        omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
5431
    }
5432
  else if (lang_hooks.decls.omp_privatize_by_reference (decl))
5433
    {
5434
      gcc_assert ((flags & GOVD_LOCAL) == 0);
5435
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5436
 
5437
      /* Similar to the direct variable sized case above, we'll need the
5438
         size of references being privatized.  */
5439
      if ((flags & GOVD_SHARED) == 0)
5440
        {
5441
          t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
5442
          if (TREE_CODE (t) != INTEGER_CST)
5443
            omp_notice_variable (ctx, t, true);
5444
        }
5445
    }
5446
 
5447
  splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
5448
}
5449
 
5450
/* Notice a threadprivate variable DECL used in OpenMP context CTX.
5451
   This just prints out diagnostics about threadprivate variable uses
5452
   in untied tasks.  If DECL2 is non-NULL, prevent this warning
5453
   on that variable.  */
5454
 
5455
static bool
5456
omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
5457
                                   tree decl2)
5458
{
5459
  splay_tree_node n;
5460
 
5461
  if (ctx->region_type != ORT_UNTIED_TASK)
5462
    return false;
5463
  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5464
  if (n == NULL)
5465
    {
5466
      error ("threadprivate variable %qE used in untied task", DECL_NAME (decl));
5467
      error_at (ctx->location, "enclosing task");
5468
      splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
5469
    }
5470
  if (decl2)
5471
    splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
5472
  return false;
5473
}
5474
 
5475
/* Record the fact that DECL was used within the OpenMP context CTX.
5476
   IN_CODE is true when real code uses DECL, and false when we should
5477
   merely emit default(none) errors.  Return true if DECL is going to
5478
   be remapped and thus DECL shouldn't be gimplified into its
5479
   DECL_VALUE_EXPR (if any).  */
5480
 
5481
static bool
5482
omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
5483
{
5484
  splay_tree_node n;
5485
  unsigned flags = in_code ? GOVD_SEEN : 0;
5486
  bool ret = false, shared;
5487
 
5488
  if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
5489
    return false;
5490
 
5491
  /* Threadprivate variables are predetermined.  */
5492
  if (is_global_var (decl))
5493
    {
5494
      if (DECL_THREAD_LOCAL_P (decl))
5495
        return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
5496
 
5497
      if (DECL_HAS_VALUE_EXPR_P (decl))
5498
        {
5499
          tree value = get_base_address (DECL_VALUE_EXPR (decl));
5500
 
5501
          if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
5502
            return omp_notice_threadprivate_variable (ctx, decl, value);
5503
        }
5504
    }
5505
 
5506
  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5507
  if (n == NULL)
5508
    {
5509
      enum omp_clause_default_kind default_kind, kind;
5510
      struct gimplify_omp_ctx *octx;
5511
 
5512
      if (ctx->region_type == ORT_WORKSHARE)
5513
        goto do_outer;
5514
 
5515
      /* ??? Some compiler-generated variables (like SAVE_EXPRs) could be
5516
         remapped firstprivate instead of shared.  To some extent this is
5517
         addressed in omp_firstprivatize_type_sizes, but not effectively.  */
5518
      default_kind = ctx->default_kind;
5519
      kind = lang_hooks.decls.omp_predetermined_sharing (decl);
5520
      if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
5521
        default_kind = kind;
5522
 
5523
      switch (default_kind)
5524
        {
5525
        case OMP_CLAUSE_DEFAULT_NONE:
5526
          error ("%qE not specified in enclosing parallel",
5527
                 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
5528
          if ((ctx->region_type & ORT_TASK) != 0)
5529
            error_at (ctx->location, "enclosing task");
5530
          else
5531
            error_at (ctx->location, "enclosing parallel");
5532
          /* FALLTHRU */
5533
        case OMP_CLAUSE_DEFAULT_SHARED:
5534
          flags |= GOVD_SHARED;
5535
          break;
5536
        case OMP_CLAUSE_DEFAULT_PRIVATE:
5537
          flags |= GOVD_PRIVATE;
5538
          break;
5539
        case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
5540
          flags |= GOVD_FIRSTPRIVATE;
5541
          break;
5542
        case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
5543
          /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED.  */
5544
          gcc_assert ((ctx->region_type & ORT_TASK) != 0);
5545
          if (ctx->outer_context)
5546
            omp_notice_variable (ctx->outer_context, decl, in_code);
5547
          for (octx = ctx->outer_context; octx; octx = octx->outer_context)
5548
            {
5549
              splay_tree_node n2;
5550
 
5551
              n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
5552
              if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
5553
                {
5554
                  flags |= GOVD_FIRSTPRIVATE;
5555
                  break;
5556
                }
5557
              if ((octx->region_type & ORT_PARALLEL) != 0)
5558
                break;
5559
            }
5560
          if (flags & GOVD_FIRSTPRIVATE)
5561
            break;
5562
          if (octx == NULL
5563
              && (TREE_CODE (decl) == PARM_DECL
5564
                  || (!is_global_var (decl)
5565
                      && DECL_CONTEXT (decl) == current_function_decl)))
5566
            {
5567
              flags |= GOVD_FIRSTPRIVATE;
5568
              break;
5569
            }
5570
          flags |= GOVD_SHARED;
5571
          break;
5572
        default:
5573
          gcc_unreachable ();
5574
        }
5575
 
5576
      if ((flags & GOVD_PRIVATE)
5577
          && lang_hooks.decls.omp_private_outer_ref (decl))
5578
        flags |= GOVD_PRIVATE_OUTER_REF;
5579
 
5580
      omp_add_variable (ctx, decl, flags);
5581
 
5582
      shared = (flags & GOVD_SHARED) != 0;
5583
      ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
5584
      goto do_outer;
5585
    }
5586
 
5587
  if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
5588
      && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
5589
      && DECL_SIZE (decl)
5590
      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
5591
    {
5592
      splay_tree_node n2;
5593
      tree t = DECL_VALUE_EXPR (decl);
5594
      gcc_assert (TREE_CODE (t) == INDIRECT_REF);
5595
      t = TREE_OPERAND (t, 0);
5596
      gcc_assert (DECL_P (t));
5597
      n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
5598
      n2->value |= GOVD_SEEN;
5599
    }
5600
 
5601
  shared = ((flags | n->value) & GOVD_SHARED) != 0;
5602
  ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
5603
 
5604
  /* If nothing changed, there's nothing left to do.  */
5605
  if ((n->value & flags) == flags)
5606
    return ret;
5607
  flags |= n->value;
5608
  n->value = flags;
5609
 
5610
 do_outer:
5611
  /* If the variable is private in the current context, then we don't
5612
     need to propagate anything to an outer context.  */
5613
  if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
5614
    return ret;
5615
  if (ctx->outer_context
5616
      && omp_notice_variable (ctx->outer_context, decl, in_code))
5617
    return true;
5618
  return ret;
5619
}
5620
 
5621
/* Verify that DECL is private within CTX.  If there's specific information
5622
   to the contrary in the innermost scope, generate an error.  */
5623
 
5624
static bool
5625
omp_is_private (struct gimplify_omp_ctx *ctx, tree decl)
5626
{
5627
  splay_tree_node n;
5628
 
5629
  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5630
  if (n != NULL)
5631
    {
5632
      if (n->value & GOVD_SHARED)
5633
        {
5634
          if (ctx == gimplify_omp_ctxp)
5635
            {
5636
              error ("iteration variable %qE should be private",
5637
                     DECL_NAME (decl));
5638
              n->value = GOVD_PRIVATE;
5639
              return true;
5640
            }
5641
          else
5642
            return false;
5643
        }
5644
      else if ((n->value & GOVD_EXPLICIT) != 0
5645
               && (ctx == gimplify_omp_ctxp
5646
                   || (ctx->region_type == ORT_COMBINED_PARALLEL
5647
                       && gimplify_omp_ctxp->outer_context == ctx)))
5648
        {
5649
          if ((n->value & GOVD_FIRSTPRIVATE) != 0)
5650
            error ("iteration variable %qE should not be firstprivate",
5651
                   DECL_NAME (decl));
5652
          else if ((n->value & GOVD_REDUCTION) != 0)
5653
            error ("iteration variable %qE should not be reduction",
5654
                   DECL_NAME (decl));
5655
        }
5656
      return (ctx == gimplify_omp_ctxp
5657
              || (ctx->region_type == ORT_COMBINED_PARALLEL
5658
                  && gimplify_omp_ctxp->outer_context == ctx));
5659
    }
5660
 
5661
  if (ctx->region_type != ORT_WORKSHARE)
5662
    return false;
5663
  else if (ctx->outer_context)
5664
    return omp_is_private (ctx->outer_context, decl);
5665
  return false;
5666
}
5667
 
5668
/* Return true if DECL is private within a parallel region
5669
   that binds to the current construct's context, or appears in that
5670
   parallel region's REDUCTION clause.  */
5671
 
5672
static bool
5673
omp_check_private (struct gimplify_omp_ctx *ctx, tree decl)
5674
{
5675
  splay_tree_node n;
5676
 
5677
  do
5678
    {
5679
      ctx = ctx->outer_context;
5680
      if (ctx == NULL)
5681
        return !(is_global_var (decl)
5682
                 /* References might be private, but might be shared too.  */
5683
                 || lang_hooks.decls.omp_privatize_by_reference (decl));
5684
 
5685
      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
5686
      if (n != NULL)
5687
        return (n->value & GOVD_SHARED) == 0;
5688
    }
5689
  while (ctx->region_type == ORT_WORKSHARE);
5690
  return false;
5691
}
5692
 
5693
/* Scan the OpenMP clauses in *LIST_P, installing mappings into a new
5694
   omp context as well as the previous omp contexts.  */
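/* For example (sketch): for "#pragma omp parallel firstprivate(x) if(cond)",
   x is recorded in the new context as explicit firstprivate (and noticed in
   the enclosing context), while the "if" operand cond is boolified and
   gimplified into a gimple value.  */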
5695
 
5696
static void
5697
gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
5698
                           enum omp_region_type region_type)
5699
{
5700
  struct gimplify_omp_ctx *ctx, *outer_ctx;
5701
  struct gimplify_ctx gctx;
5702
  tree c;
5703
 
5704
  ctx = new_omp_context (region_type);
5705
  outer_ctx = ctx->outer_context;
5706
 
5707
  while ((c = *list_p) != NULL)
5708
    {
5709
      bool remove = false;
5710
      bool notice_outer = true;
5711
      const char *check_non_private = NULL;
5712
      unsigned int flags;
5713
      tree decl;
5714
 
5715
      switch (OMP_CLAUSE_CODE (c))
5716
        {
5717
        case OMP_CLAUSE_PRIVATE:
5718
          flags = GOVD_PRIVATE | GOVD_EXPLICIT;
5719
          if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
5720
            {
5721
              flags |= GOVD_PRIVATE_OUTER_REF;
5722
              OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
5723
            }
5724
          else
5725
            notice_outer = false;
5726
          goto do_add;
5727
        case OMP_CLAUSE_SHARED:
5728
          flags = GOVD_SHARED | GOVD_EXPLICIT;
5729
          goto do_add;
5730
        case OMP_CLAUSE_FIRSTPRIVATE:
5731
          flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
5732
          check_non_private = "firstprivate";
5733
          goto do_add;
5734
        case OMP_CLAUSE_LASTPRIVATE:
5735
          flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
5736
          check_non_private = "lastprivate";
5737
          goto do_add;
5738
        case OMP_CLAUSE_REDUCTION:
5739
          flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
5740
          check_non_private = "reduction";
5741
          goto do_add;
5742
 
5743
        do_add:
5744
          decl = OMP_CLAUSE_DECL (c);
5745
          if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
5746
            {
5747
              remove = true;
5748
              break;
5749
            }
5750
          omp_add_variable (ctx, decl, flags);
5751
          if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5752
              && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5753
            {
5754
              omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
5755
                                GOVD_LOCAL | GOVD_SEEN);
5756
              gimplify_omp_ctxp = ctx;
5757
              push_gimplify_context (&gctx);
5758
 
5759
              OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = gimple_seq_alloc ();
5760
              OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = gimple_seq_alloc ();
5761
 
5762
              gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
5763
                                &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
5764
              pop_gimplify_context
5765
                (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
5766
              push_gimplify_context (&gctx);
5767
              gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
5768
                                &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5769
              pop_gimplify_context
5770
                (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
5771
              OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
5772
              OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
5773
 
5774
              gimplify_omp_ctxp = outer_ctx;
5775
            }
5776
          else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5777
                   && OMP_CLAUSE_LASTPRIVATE_STMT (c))
5778
            {
5779
              gimplify_omp_ctxp = ctx;
5780
              push_gimplify_context (&gctx);
5781
              if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
5782
                {
5783
                  tree bind = build3 (BIND_EXPR, void_type_node, NULL,
5784
                                      NULL, NULL);
5785
                  TREE_SIDE_EFFECTS (bind) = 1;
5786
                  BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
5787
                  OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
5788
                }
5789
              gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
5790
                                &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
5791
              pop_gimplify_context
5792
                (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
5793
              OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
5794
 
5795
              gimplify_omp_ctxp = outer_ctx;
5796
            }
5797
          if (notice_outer)
5798
            goto do_notice;
5799
          break;
5800
 
5801
        case OMP_CLAUSE_COPYIN:
5802
        case OMP_CLAUSE_COPYPRIVATE:
5803
          decl = OMP_CLAUSE_DECL (c);
5804
          if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
5805
            {
5806
              remove = true;
5807
              break;
5808
            }
5809
        do_notice:
5810
          if (outer_ctx)
5811
            omp_notice_variable (outer_ctx, decl, true);
5812
          if (check_non_private
5813
              && region_type == ORT_WORKSHARE
5814
              && omp_check_private (ctx, decl))
5815
            {
5816
              error ("%s variable %qE is private in outer context",
5817
                     check_non_private, DECL_NAME (decl));
5818
              remove = true;
5819
            }
5820
          break;
5821
 
5822
        case OMP_CLAUSE_IF:
5823
          OMP_CLAUSE_OPERAND (c, 0)
5824
            = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
5825
          /* Fall through.  */
5826
 
5827
        case OMP_CLAUSE_SCHEDULE:
5828
        case OMP_CLAUSE_NUM_THREADS:
5829
          if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
5830
                             is_gimple_val, fb_rvalue) == GS_ERROR)
5831
              remove = true;
5832
          break;
5833
 
5834
        case OMP_CLAUSE_NOWAIT:
5835
        case OMP_CLAUSE_ORDERED:
5836
        case OMP_CLAUSE_UNTIED:
5837
        case OMP_CLAUSE_COLLAPSE:
5838
          break;
5839
 
5840
        case OMP_CLAUSE_DEFAULT:
5841
          ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
5842
          break;
5843
 
5844
        default:
5845
          gcc_unreachable ();
5846
        }
5847
 
5848
      if (remove)
5849
        *list_p = OMP_CLAUSE_CHAIN (c);
5850
      else
5851
        list_p = &OMP_CLAUSE_CHAIN (c);
5852
    }
5853
 
5854
  gimplify_omp_ctxp = ctx;
5855
}
5856
 
5857
/* For all variables that were not actually used within the context,
5858
   remove PRIVATE, SHARED, and FIRSTPRIVATE clauses.  */
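/* Illustrative example: if "#pragma omp parallel shared(x)" never actually
   uses x in its body, the explicit clause is dropped during this adjustment;
   conversely, a variable that was noticed in the body but has no explicit
   clause gets an implicit clause (shared, private or firstprivate, as
   recorded in the context) added for it.  */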
5859
 
5860
static int
5861
gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
5862
{
5863
  tree *list_p = (tree *) data;
5864
  tree decl = (tree) n->key;
5865
  unsigned flags = n->value;
5866
  enum omp_clause_code code;
5867
  tree clause;
5868
  bool private_debug;
5869
 
5870
  if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
5871
    return 0;
5872
  if ((flags & GOVD_SEEN) == 0)
5873
    return 0;
5874
  if (flags & GOVD_DEBUG_PRIVATE)
5875
    {
5876
      gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_PRIVATE);
5877
      private_debug = true;
5878
    }
5879
  else
5880
    private_debug
5881
      = lang_hooks.decls.omp_private_debug_clause (decl,
5882
                                                   !!(flags & GOVD_SHARED));
5883
  if (private_debug)
5884
    code = OMP_CLAUSE_PRIVATE;
5885
  else if (flags & GOVD_SHARED)
5886
    {
5887
      if (is_global_var (decl))
5888
        {
5889
          struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
5890
          while (ctx != NULL)
5891
            {
5892
              splay_tree_node on
5893
                = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
5894
              if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
5895
                                      | GOVD_PRIVATE | GOVD_REDUCTION)) != 0)
5896
                break;
5897
              ctx = ctx->outer_context;
5898
            }
5899
          if (ctx == NULL)
5900
            return 0;
5901
        }
5902
      code = OMP_CLAUSE_SHARED;
5903
    }
5904
  else if (flags & GOVD_PRIVATE)
5905
    code = OMP_CLAUSE_PRIVATE;
5906
  else if (flags & GOVD_FIRSTPRIVATE)
5907
    code = OMP_CLAUSE_FIRSTPRIVATE;
5908
  else
5909
    gcc_unreachable ();
5910
 
5911
  clause = build_omp_clause (input_location, code);
5912
  OMP_CLAUSE_DECL (clause) = decl;
5913
  OMP_CLAUSE_CHAIN (clause) = *list_p;
5914
  if (private_debug)
5915
    OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
5916
  else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
5917
    OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
5918
  *list_p = clause;
5919
  lang_hooks.decls.omp_finish_clause (clause);
5920
 
5921
  return 0;
5922
}
5923
 
5924
static void
5925
gimplify_adjust_omp_clauses (tree *list_p)
5926
{
5927
  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
5928
  tree c, decl;
5929
 
5930
  while ((c = *list_p) != NULL)
5931
    {
5932
      splay_tree_node n;
5933
      bool remove = false;
5934
 
5935
      switch (OMP_CLAUSE_CODE (c))
5936
        {
5937
        case OMP_CLAUSE_PRIVATE:
5938
        case OMP_CLAUSE_SHARED:
5939
        case OMP_CLAUSE_FIRSTPRIVATE:
5940
          decl = OMP_CLAUSE_DECL (c);
5941
          n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
5942
          remove = !(n->value & GOVD_SEEN);
5943
          if (! remove)
5944
            {
5945
              bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
5946
              if ((n->value & GOVD_DEBUG_PRIVATE)
5947
                  || lang_hooks.decls.omp_private_debug_clause (decl, shared))
5948
                {
5949
                  gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
5950
                              || ((n->value & GOVD_DATA_SHARE_CLASS)
5951
                                  == GOVD_PRIVATE));
5952
                  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
5953
                  OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
5954
                }
5955
            }
5956
          break;
5957
 
5958
        case OMP_CLAUSE_LASTPRIVATE:
5959
          /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
5960
             accurately reflect the presence of a FIRSTPRIVATE clause.  */
5961
          decl = OMP_CLAUSE_DECL (c);
5962
          n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
5963
          OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
5964
            = (n->value & GOVD_FIRSTPRIVATE) != 0;
5965
          break;
5966
 
5967
        case OMP_CLAUSE_REDUCTION:
5968
        case OMP_CLAUSE_COPYIN:
5969
        case OMP_CLAUSE_COPYPRIVATE:
5970
        case OMP_CLAUSE_IF:
5971
        case OMP_CLAUSE_NUM_THREADS:
5972
        case OMP_CLAUSE_SCHEDULE:
5973
        case OMP_CLAUSE_NOWAIT:
5974
        case OMP_CLAUSE_ORDERED:
5975
        case OMP_CLAUSE_DEFAULT:
5976
        case OMP_CLAUSE_UNTIED:
5977
        case OMP_CLAUSE_COLLAPSE:
5978
          break;
5979
 
5980
        default:
5981
          gcc_unreachable ();
5982
        }
5983
 
5984
      if (remove)
5985
        *list_p = OMP_CLAUSE_CHAIN (c);
5986
      else
5987
        list_p = &OMP_CLAUSE_CHAIN (c);
5988
    }
5989
 
5990
  /* Add in any implicit data sharing.  */
5991
  splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, list_p);
5992
 
5993
  gimplify_omp_ctxp = ctx->outer_context;
5994
  delete_omp_context (ctx);
5995
}
5996
 
5997
/* Gimplify the contents of an OMP_PARALLEL statement.  This involves
5998
   gimplification of the body, as well as scanning the body for used
5999
   variables.  We need to do this scan now, because variable-sized
6000
   decls will be decomposed during gimplification.  */
6001
 
6002
static void
6003
gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
6004
{
6005
  tree expr = *expr_p;
6006
  gimple g;
6007
  gimple_seq body = NULL;
6008
  struct gimplify_ctx gctx;
6009
 
6010
  gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
6011
                             OMP_PARALLEL_COMBINED (expr)
6012
                             ? ORT_COMBINED_PARALLEL
6013
                             : ORT_PARALLEL);
6014
 
6015
  push_gimplify_context (&gctx);
6016
 
6017
  g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
6018
  if (gimple_code (g) == GIMPLE_BIND)
6019
    pop_gimplify_context (g);
6020
  else
6021
    pop_gimplify_context (NULL);
6022
 
6023
  gimplify_adjust_omp_clauses (&OMP_PARALLEL_CLAUSES (expr));
6024
 
6025
  g = gimple_build_omp_parallel (body,
6026
                                 OMP_PARALLEL_CLAUSES (expr),
6027
                                 NULL_TREE, NULL_TREE);
6028
  if (OMP_PARALLEL_COMBINED (expr))
6029
    gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
6030
  gimplify_seq_add_stmt (pre_p, g);
6031
  *expr_p = NULL_TREE;
6032
}
6033
 
6034
/* Gimplify the contents of an OMP_TASK statement.  This involves
6035
   gimplification of the body, as well as scanning the body for used
6036
   variables.  We need to do this scan now, because variable-sized
6037
   decls will be decomposed during gimplification.  */
6038
 
6039
static void
6040
gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
6041
{
6042
  tree expr = *expr_p;
6043
  gimple g;
6044
  gimple_seq body = NULL;
6045
  struct gimplify_ctx gctx;
6046
 
6047
  gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
6048
                             find_omp_clause (OMP_TASK_CLAUSES (expr),
6049
                                              OMP_CLAUSE_UNTIED)
6050
                             ? ORT_UNTIED_TASK : ORT_TASK);
6051
 
6052
  push_gimplify_context (&gctx);
6053
 
6054
  g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
6055
  if (gimple_code (g) == GIMPLE_BIND)
6056
    pop_gimplify_context (g);
6057
  else
6058
    pop_gimplify_context (NULL);
6059
 
6060
  gimplify_adjust_omp_clauses (&OMP_TASK_CLAUSES (expr));
6061
 
6062
  g = gimple_build_omp_task (body,
6063
                             OMP_TASK_CLAUSES (expr),
6064
                             NULL_TREE, NULL_TREE,
6065
                             NULL_TREE, NULL_TREE, NULL_TREE);
6066
  gimplify_seq_add_stmt (pre_p, g);
6067
  *expr_p = NULL_TREE;
6068
}
6069
 
6070
/* Gimplify the gross structure of an OMP_FOR statement.  */
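/* For illustration (the exact trees depend on the front end): a canonical
   loop such as

       #pragma omp for
       for (i = 0; i < n; i++)
         ...

   arrives with OMP_FOR_INIT holding "i = 0", OMP_FOR_COND holding "i < n"
   and OMP_FOR_INCR holding "i++"; the increment is rewritten below into the
   MODIFY_EXPR form "i = i + 1" before its operands are gimplified.  */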
6071
 
6072
static enum gimplify_status
6073
gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
6074
{
6075
  tree for_stmt, decl, var, t;
6076
  enum gimplify_status ret = GS_ALL_DONE;
6077
  enum gimplify_status tret;
6078
  gimple gfor;
6079
  gimple_seq for_body, for_pre_body;
6080
  int i;
6081
 
6082
  for_stmt = *expr_p;
6083
 
6084
  gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p,
6085
                             ORT_WORKSHARE);
6086
 
6087
  /* Handle OMP_FOR_INIT.  */
6088
  for_pre_body = NULL;
6089
  gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
6090
  OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
6091
 
6092
  for_body = gimple_seq_alloc ();
6093
  gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
6094
              == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
6095
  gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
6096
              == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
6097
  for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
6098
    {
6099
      t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
6100
      gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
6101
      decl = TREE_OPERAND (t, 0);
6102
      gcc_assert (DECL_P (decl));
6103
      gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
6104
                  || POINTER_TYPE_P (TREE_TYPE (decl)));
6105
 
6106
      /* Make sure the iteration variable is private.  */
6107
      if (omp_is_private (gimplify_omp_ctxp, decl))
6108
        omp_notice_variable (gimplify_omp_ctxp, decl, true);
6109
      else
6110
        omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
6111
 
6112
      /* If DECL is not a gimple register, create a temporary variable to act
6113
         as an iteration counter.  This is valid, since DECL cannot be
6114
         modified in the body of the loop.  */
6115
      if (!is_gimple_reg (decl))
6116
        {
6117
          var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
6118
          TREE_OPERAND (t, 0) = var;
6119
 
6120
          gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
6121
 
6122
          omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
6123
        }
6124
      else
6125
        var = decl;
6126
 
6127
      tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
6128
                            is_gimple_val, fb_rvalue);
6129
      ret = MIN (ret, tret);
6130
      if (ret == GS_ERROR)
6131
        return ret;
6132
 
6133
      /* Handle OMP_FOR_COND.  */
6134
      t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
6135
      gcc_assert (COMPARISON_CLASS_P (t));
6136
      gcc_assert (TREE_OPERAND (t, 0) == decl);
6137
 
6138
      tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
6139
                            is_gimple_val, fb_rvalue);
6140
      ret = MIN (ret, tret);
6141
 
6142
      /* Handle OMP_FOR_INCR.  */
6143
      t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
6144
      switch (TREE_CODE (t))
6145
        {
6146
        case PREINCREMENT_EXPR:
6147
        case POSTINCREMENT_EXPR:
6148
          t = build_int_cst (TREE_TYPE (decl), 1);
6149
          t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
6150
          t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
6151
          TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
6152
          break;
6153
 
6154
        case PREDECREMENT_EXPR:
6155
        case POSTDECREMENT_EXPR:
6156
          t = build_int_cst (TREE_TYPE (decl), -1);
6157
          t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
6158
          t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
6159
          TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
6160
          break;
6161
 
6162
        case MODIFY_EXPR:
6163
          gcc_assert (TREE_OPERAND (t, 0) == decl);
6164
          TREE_OPERAND (t, 0) = var;
6165
 
6166
          t = TREE_OPERAND (t, 1);
6167
          switch (TREE_CODE (t))
6168
            {
6169
            case PLUS_EXPR:
6170
              if (TREE_OPERAND (t, 1) == decl)
6171
                {
6172
                  TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
6173
                  TREE_OPERAND (t, 0) = var;
6174
                  break;
6175
                }
6176
 
6177
              /* Fallthru.  */
6178
            case MINUS_EXPR:
6179
            case POINTER_PLUS_EXPR:
6180
              gcc_assert (TREE_OPERAND (t, 0) == decl);
6181
              TREE_OPERAND (t, 0) = var;
6182
              break;
6183
            default:
6184
              gcc_unreachable ();
6185
            }
6186
 
6187
          tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
6188
                                is_gimple_val, fb_rvalue);
6189
          ret = MIN (ret, tret);
6190
          break;
6191
 
6192
        default:
6193
          gcc_unreachable ();
6194
        }
6195
 
6196
      if (var != decl || TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)
6197
        {
6198
          tree c;
6199
          for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
6200
            if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6201
                && OMP_CLAUSE_DECL (c) == decl
6202
                && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
6203
              {
6204
                t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
6205
                gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
6206
                gcc_assert (TREE_OPERAND (t, 0) == var);
6207
                t = TREE_OPERAND (t, 1);
6208
                gcc_assert (TREE_CODE (t) == PLUS_EXPR
6209
                            || TREE_CODE (t) == MINUS_EXPR
6210
                            || TREE_CODE (t) == POINTER_PLUS_EXPR);
6211
                gcc_assert (TREE_OPERAND (t, 0) == var);
6212
                t = build2 (TREE_CODE (t), TREE_TYPE (decl), decl,
6213
                            TREE_OPERAND (t, 1));
6214
                gimplify_assign (decl, t,
6215
                                 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
6216
              }
6217
        }
6218
    }
6219
 
6220
  gimplify_and_add (OMP_FOR_BODY (for_stmt), &for_body);
6221
 
6222
  gimplify_adjust_omp_clauses (&OMP_FOR_CLAUSES (for_stmt));
6223
 
6224
  gfor = gimple_build_omp_for (for_body, OMP_FOR_CLAUSES (for_stmt),
6225
                               TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
6226
                               for_pre_body);
6227
 
6228
  for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
6229
    {
6230
      t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
6231
      gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
6232
      gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
6233
      t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
6234
      gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
6235
      gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
6236
      t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
6237
      gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
6238
    }
6239
 
6240
  gimplify_seq_add_stmt (pre_p, gfor);
6241
  return ret == GS_ALL_DONE ? GS_ALL_DONE : GS_ERROR;
6242
}
6243
 
6244
/* Gimplify the gross structure of other OpenMP worksharing constructs.
6245
   In particular, OMP_SECTIONS and OMP_SINGLE.  */
6246
 
6247
static void
6248
gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
6249
{
6250
  tree expr = *expr_p;
6251
  gimple stmt;
6252
  gimple_seq body = NULL;
6253
 
6254
  gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ORT_WORKSHARE);
6255
  gimplify_and_add (OMP_BODY (expr), &body);
6256
  gimplify_adjust_omp_clauses (&OMP_CLAUSES (expr));
6257
 
6258
  if (TREE_CODE (expr) == OMP_SECTIONS)
6259
    stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
6260
  else if (TREE_CODE (expr) == OMP_SINGLE)
6261
    stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
6262
  else
6263
    gcc_unreachable ();
6264
 
6265
  gimplify_seq_add_stmt (pre_p, stmt);
6266
}
6267
 
6268
/* A subroutine of gimplify_omp_atomic.  The front end is supposed to have
6269
   stabilized the lhs of the atomic operation as *ADDR.  Return true if
6270
   EXPR is this stabilized form.  */
6271
 
6272
static bool
6273
goa_lhs_expr_p (tree expr, tree addr)
6274
{
6275
  /* Also include casts to other type variants.  The C front end is fond
6276
     of adding these for e.g. volatile variables.  This is like
6277
     STRIP_TYPE_NOPS but includes the main variant lookup.  */
6278
  STRIP_USELESS_TYPE_CONVERSION (expr);
6279
 
6280
  if (TREE_CODE (expr) == INDIRECT_REF)
6281
    {
6282
      expr = TREE_OPERAND (expr, 0);
6283
      while (expr != addr
6284
             && (CONVERT_EXPR_P (expr)
6285
                 || TREE_CODE (expr) == NON_LVALUE_EXPR)
6286
             && TREE_CODE (expr) == TREE_CODE (addr)
6287
             && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
6288
        {
6289
          expr = TREE_OPERAND (expr, 0);
6290
          addr = TREE_OPERAND (addr, 0);
6291
        }
6292
      if (expr == addr)
6293
        return true;
6294
      return (TREE_CODE (addr) == ADDR_EXPR
6295
              && TREE_CODE (expr) == ADDR_EXPR
6296
              && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
6297
    }
6298
  if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
6299
    return true;
6300
  return false;
6301
}
6302
 
6303
/* Walk *EXPR_P and replace
6304
   appearances of *LHS_ADDR with LHS_VAR.  If an expression does not involve
6305
   the lhs, evaluate it into a temporary.  Return 1 if the lhs appeared as
6306
   a subexpression, 0 if it did not, or -1 if an error was encountered.  */
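/* For example (illustrative only): if the atomic rhs is "x + foo ()" with
   lhs x, the call foo () does not involve the lhs and is evaluated into a
   temporary in PRE_P, while the occurrence of the lhs (seen as *LHS_ADDR)
   is replaced by LHS_VAR, so 1 is returned.  */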
6307
 
6308
static int
6309
goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
6310
                    tree lhs_var)
6311
{
6312
  tree expr = *expr_p;
6313
  int saw_lhs;
6314
 
6315
  if (goa_lhs_expr_p (expr, lhs_addr))
6316
    {
6317
      *expr_p = lhs_var;
6318
      return 1;
6319
    }
6320
  if (is_gimple_val (expr))
6321
    return 0;
6322
 
6323
  saw_lhs = 0;
6324
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
6325
    {
6326
    case tcc_binary:
6327
    case tcc_comparison:
6328
      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
6329
                                     lhs_var);
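      /* Fall through so that operand 0 of binary and comparison
         nodes is stabilized as well.  */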
6330
    case tcc_unary:
6331
      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
6332
                                     lhs_var);
6333
      break;
6334
    case tcc_expression:
6335
      switch (TREE_CODE (expr))
6336
        {
6337
        case TRUTH_ANDIF_EXPR:
6338
        case TRUTH_ORIF_EXPR:
6339
          saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
6340
                                         lhs_addr, lhs_var);
6341
          saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
6342
                                         lhs_addr, lhs_var);
6343
          break;
6344
        default:
6345
          break;
6346
        }
6347
      break;
6348
    default:
6349
      break;
6350
    }
6351
 
6352
  if (saw_lhs == 0)
6353
    {
6354
      enum gimplify_status gs;
6355
      gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
6356
      if (gs != GS_ALL_DONE)
6357
        saw_lhs = -1;
6358
    }
6359
 
6360
  return saw_lhs;
6361
}
6362
 
6363
 
6364
/* Gimplify an OMP_ATOMIC statement.  */
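/* For example (a sketch; the exact trees depend on the front end), a
   statement like "#pragma omp atomic  x = x + 1" reaches this point with
   operand 0 being the stabilized address &x and operand 1 being the rhs
   with the lhs appearing as *(&x).  It is lowered to a
   GIMPLE_OMP_ATOMIC_LOAD of the current value into a temporary, followed
   by a GIMPLE_OMP_ATOMIC_STORE of the updated value.  */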
6365
 
6366
static enum gimplify_status
6367
gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
6368
{
6369
  tree addr = TREE_OPERAND (*expr_p, 0);
6370
  tree rhs = TREE_OPERAND (*expr_p, 1);
6371
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
6372
  tree tmp_load;
6373
 
6374
   tmp_load = create_tmp_var (type, NULL);
6375
   if (TREE_CODE (type) == COMPLEX_TYPE || TREE_CODE (type) == VECTOR_TYPE)
6376
     DECL_GIMPLE_REG_P (tmp_load) = 1;
6377
   if (goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
6378
     return GS_ERROR;
6379
 
6380
   if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
6381
       != GS_ALL_DONE)
6382
     return GS_ERROR;
6383
 
6384
   gimplify_seq_add_stmt (pre_p, gimple_build_omp_atomic_load (tmp_load, addr));
6385
   if (gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
6386
       != GS_ALL_DONE)
6387
     return GS_ERROR;
6388
   gimplify_seq_add_stmt (pre_p, gimple_build_omp_atomic_store (rhs));
6389
   *expr_p = NULL;
6390
 
6391
   return GS_ALL_DONE;
6392
}
6393
 
6394
 
6395
/* Converts the GENERIC expression tree *EXPR_P to GIMPLE.  If the
6396
   expression produces a value to be used as an operand inside a GIMPLE
6397
   statement, the value will be stored back in *EXPR_P.  This value will
6398
   be a tree of class tcc_declaration, tcc_constant, tcc_reference or
6399
   an SSA_NAME.  The corresponding sequence of GIMPLE statements is
6400
   emitted in PRE_P and POST_P.
6401
 
6402
   Additionally, this process may overwrite parts of the input
6403
   expression during gimplification.  Ideally, it should be
6404
   possible to do non-destructive gimplification.
6405
 
6406
   EXPR_P points to the GENERIC expression to convert to GIMPLE.  If
6407
      the expression needs to evaluate to a value to be used as
6408
      an operand in a GIMPLE statement, this value will be stored in
6409
      *EXPR_P on exit.  This happens when the caller specifies one
6410
      of fb_lvalue or fb_rvalue fallback flags.
6411
 
6412
   PRE_P will contain the sequence of GIMPLE statements corresponding
6413
       to the evaluation of EXPR and all the side-effects that must
6414
       be executed before the main expression.  On exit, the last
6415
       statement of PRE_P is the core statement being gimplified.  For
6416
       instance, when gimplifying 'if (++a)' the last statement in
6417
       PRE_P will be 'if (t.1)' where t.1 is the result of
6418
       pre-incrementing 'a'.
6419
 
6420
   POST_P will contain the sequence of GIMPLE statements corresponding
6421
       to the evaluation of all the side-effects that must be executed
6422
       after the main expression.  If this is NULL, the post
6423
       side-effects are stored at the end of PRE_P.
6424
 
6425
       The reason why the output is split in two is to handle post
6426
       side-effects explicitly.  In some cases, an expression may have
6427
       inner and outer post side-effects which need to be emitted in
6428
       an order different from the one given by the recursive
6429
       traversal.  For instance, for the expression (*p--)++ the post
6430
       side-effects of '--' must actually occur *after* the post
6431
       side-effects of '++'.  However, gimplification will first visit
6432
       the inner expression, so if a separate POST sequence was not
6433
       used, the resulting sequence would be:
6434
 
6435
            1   t.1 = *p
6436
            2   p = p - 1
6437
            3   t.2 = t.1 + 1
6438
            4   *p = t.2
6439
 
6440
       However, the post-decrement operation in line #2 must not be
6441
       evaluated until after the store to *p at line #4, so the
6442
       correct sequence should be:
6443
 
6444
            1   t.1 = *p
6445
            2   t.2 = t.1 + 1
6446
            3   *p = t.2
6447
            4   p = p - 1
6448
 
6449
       So, by specifying a separate post queue, it is possible
6450
       to emit the post side-effects in the correct order.
6451
       If POST_P is NULL, an internal queue will be used.  Before
6452
       returning to the caller, the sequence POST_P is appended to
6453
       the main output sequence PRE_P.
6454
 
6455
   GIMPLE_TEST_F points to a function that takes a tree T and
6456
       returns nonzero if T is in the GIMPLE form requested by the
6457
       caller.  The GIMPLE predicates are in gimple.c.
6458
 
6459
   FALLBACK tells the function what sort of a temporary we want if
6460
       gimplification cannot produce an expression that complies with
6461
       GIMPLE_TEST_F.
6462
 
6463
       fb_none means that no temporary should be generated
6464
       fb_rvalue means that an rvalue is OK to generate
6465
       fb_lvalue means that an lvalue is OK to generate
6466
       fb_either means that either is OK, but an lvalue is preferable.
6467
       fb_mayfail means that gimplification may fail (in which case
6468
       GS_ERROR will be returned)
6469
 
6470
   The return value is either GS_ERROR or GS_ALL_DONE, since this
6471
   function iterates until EXPR is completely gimplified or an error
6472
   occurs.  */
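/* A representative internal use, as in gimplify_omp_for above, gimplifies
   an operand into an is_gimple_val rvalue and merges the status into the
   caller's overall result:

       tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
                             is_gimple_val, fb_rvalue);
       ret = MIN (ret, tret);  */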
6473
 
6474
enum gimplify_status
6475
gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
6476
               bool (*gimple_test_f) (tree), fallback_t fallback)
6477
{
6478
  tree tmp;
6479
  gimple_seq internal_pre = NULL;
6480
  gimple_seq internal_post = NULL;
6481
  tree save_expr;
6482
  bool is_statement;
6483
  location_t saved_location;
6484
  enum gimplify_status ret;
6485
  gimple_stmt_iterator pre_last_gsi, post_last_gsi;
6486
 
6487
  save_expr = *expr_p;
6488
  if (save_expr == NULL_TREE)
6489
    return GS_ALL_DONE;
6490
 
6491
  /* If we are gimplifying a top-level statement, PRE_P must be valid.  */
6492
  is_statement = gimple_test_f == is_gimple_stmt;
6493
  if (is_statement)
6494
    gcc_assert (pre_p);
6495
 
6496
  /* Consistency checks.  */
6497
  if (gimple_test_f == is_gimple_reg)
6498
    gcc_assert (fallback & (fb_rvalue | fb_lvalue));
6499
  else if (gimple_test_f == is_gimple_val
6500
           || gimple_test_f == is_gimple_call_addr
6501
           || gimple_test_f == is_gimple_condexpr
6502
           || gimple_test_f == is_gimple_mem_rhs
6503
           || gimple_test_f == is_gimple_mem_rhs_or_call
6504
           || gimple_test_f == is_gimple_reg_rhs
6505
           || gimple_test_f == is_gimple_reg_rhs_or_call
6506
           || gimple_test_f == is_gimple_asm_val)
6507
    gcc_assert (fallback & fb_rvalue);
6508
  else if (gimple_test_f == is_gimple_min_lval
6509
           || gimple_test_f == is_gimple_lvalue)
6510
    gcc_assert (fallback & fb_lvalue);
6511
  else if (gimple_test_f == is_gimple_addressable)
6512
    gcc_assert (fallback & fb_either);
6513
  else if (gimple_test_f == is_gimple_stmt)
6514
    gcc_assert (fallback == fb_none);
6515
  else
6516
    {
6517
      /* We should have recognized the GIMPLE_TEST_F predicate to
6518
         know what kind of fallback to use in case a temporary is
6519
         needed to hold the value or address of *EXPR_P.  */
6520
      gcc_unreachable ();
6521
    }
6522
 
6523
  /* We used to check the predicate here and return immediately if it
6524
     succeeds.  This is wrong; the design is for gimplification to be
6525
     idempotent, and for the predicates to only test for valid forms, not
6526
     whether they are fully simplified.  */
6527
  if (pre_p == NULL)
6528
    pre_p = &internal_pre;
6529
 
6530
  if (post_p == NULL)
6531
    post_p = &internal_post;
6532
 
6533
  /* Remember the last statements added to PRE_P and POST_P.  Every
6534
     new statement added by the gimplification helpers needs to be
6535
     annotated with location information.  To centralize the
6536
     responsibility, we remember the last statement that had been
6537
     added to both queues before gimplifying *EXPR_P.  If
6538
     gimplification produces new statements in PRE_P and POST_P, those
6539
     statements will be annotated with the same location information
6540
     as *EXPR_P.  */
6541
  pre_last_gsi = gsi_last (*pre_p);
6542
  post_last_gsi = gsi_last (*post_p);
6543
 
6544
  saved_location = input_location;
6545
  if (save_expr != error_mark_node
6546
      && EXPR_HAS_LOCATION (*expr_p))
6547
    input_location = EXPR_LOCATION (*expr_p);
6548
 
6549
  /* Loop over the specific gimplifiers until the toplevel node
6550
     remains the same.  */
6551
  do
6552
    {
6553
      /* Strip away as many useless type conversions as possible
6554
         at the toplevel.  */
6555
      STRIP_USELESS_TYPE_CONVERSION (*expr_p);
6556
 
6557
      /* Remember the expr.  */
6558
      save_expr = *expr_p;
6559
 
6560
      /* Die, die, die, my darling.  */
6561
      if (save_expr == error_mark_node
6562
          || (TREE_TYPE (save_expr)
6563
              && TREE_TYPE (save_expr) == error_mark_node))
6564
        {
6565
          ret = GS_ERROR;
6566
          break;
6567
        }
6568
 
6569
      /* Do any language-specific gimplification.  */
6570
      ret = ((enum gimplify_status)
6571
             lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
6572
      if (ret == GS_OK)
6573
        {
6574
          if (*expr_p == NULL_TREE)
6575
            break;
6576
          if (*expr_p != save_expr)
6577
            continue;
6578
        }
6579
      else if (ret != GS_UNHANDLED)
6580
        break;
6581
 
6582
      ret = GS_OK;
6583
      switch (TREE_CODE (*expr_p))
6584
        {
6585
          /* First deal with the special cases.  */
6586
 
6587
        case POSTINCREMENT_EXPR:
6588
        case POSTDECREMENT_EXPR:
6589
        case PREINCREMENT_EXPR:
6590
        case PREDECREMENT_EXPR:
6591
          ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
6592
                                        fallback != fb_none);
6593
          break;
6594
 
6595
        case ARRAY_REF:
6596
        case ARRAY_RANGE_REF:
6597
        case REALPART_EXPR:
6598
        case IMAGPART_EXPR:
6599
        case COMPONENT_REF:
6600
        case VIEW_CONVERT_EXPR:
6601
          ret = gimplify_compound_lval (expr_p, pre_p, post_p,
6602
                                        fallback ? fallback : fb_rvalue);
6603
          break;
6604
 
6605
        case COND_EXPR:
6606
          ret = gimplify_cond_expr (expr_p, pre_p, fallback);
6607
 
6608
          /* C99 code may assign to an array in a structure value of a
6609
             conditional expression, and this has undefined behavior
6610
             only on execution, so create a temporary if an lvalue is
6611
             required.  */
6612
          if (fallback == fb_lvalue)
6613
            {
6614
              *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
6615
              mark_addressable (*expr_p);
6616
            }
6617
          break;
6618
 
6619
        case CALL_EXPR:
6620
          ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
6621
 
6622
          /* C99 code may assign to an array in a structure returned
6623
             from a function, and this has undefined behavior only on
6624
             execution, so create a temporary if an lvalue is
6625
             required.  */
6626
          if (fallback == fb_lvalue)
6627
            {
6628
              *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
6629
              mark_addressable (*expr_p);
6630
            }
6631
          break;
6632
 
6633
        case TREE_LIST:
6634
          gcc_unreachable ();
6635
 
6636
        case COMPOUND_EXPR:
6637
          ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
6638
          break;
6639
 
6640
        case COMPOUND_LITERAL_EXPR:
6641
          ret = gimplify_compound_literal_expr (expr_p, pre_p);
6642
          break;
6643
 
6644
        case MODIFY_EXPR:
6645
        case INIT_EXPR:
6646
          ret = gimplify_modify_expr (expr_p, pre_p, post_p,
6647
                                      fallback != fb_none);
6648
          /* Don't let the end of loop logic change GS_OK to GS_ALL_DONE;
6649
             gimplify_modify_expr_rhs might have changed the RHS.  */
6650
          if (ret == GS_OK && *expr_p)
6651
            continue;
6652
          break;
6653
 
6654
        case TRUTH_ANDIF_EXPR:
6655
        case TRUTH_ORIF_EXPR:
6656
          /* Pass the source location of the outer expression.  */
6657
          ret = gimplify_boolean_expr (expr_p, saved_location);
6658
          break;
6659
 
6660
        case TRUTH_NOT_EXPR:
6661
          if (TREE_CODE (TREE_TYPE (*expr_p)) != BOOLEAN_TYPE)
6662
            {
6663
              tree type = TREE_TYPE (*expr_p);
6664
              *expr_p = fold_convert (type, gimple_boolify (*expr_p));
6665
              ret = GS_OK;
6666
              break;
6667
            }
6668
 
6669
          ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6670
                               is_gimple_val, fb_rvalue);
6671
          recalculate_side_effects (*expr_p);
6672
          break;
6673
 
6674
        case ADDR_EXPR:
6675
          ret = gimplify_addr_expr (expr_p, pre_p, post_p);
6676
          break;
6677
 
6678
        case VA_ARG_EXPR:
6679
          ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
6680
          break;
6681
 
6682
        CASE_CONVERT:
6683
          if (IS_EMPTY_STMT (*expr_p))
6684
            {
6685
              ret = GS_ALL_DONE;
6686
              break;
6687
            }
6688
 
6689
          if (VOID_TYPE_P (TREE_TYPE (*expr_p))
6690
              || fallback == fb_none)
6691
            {
6692
              /* Just strip a conversion to void (or in void context) and
6693
                 try again.  */
6694
              *expr_p = TREE_OPERAND (*expr_p, 0);
6695
              break;
6696
            }
6697
 
6698
          ret = gimplify_conversion (expr_p);
6699
          if (ret == GS_ERROR)
6700
            break;
6701
          if (*expr_p != save_expr)
6702
            break;
6703
          /* FALLTHRU */
6704
 
6705
        case FIX_TRUNC_EXPR:
6706
          /* unary_expr: ... | '(' cast ')' val | ...  */
6707
          ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6708
                               is_gimple_val, fb_rvalue);
6709
          recalculate_side_effects (*expr_p);
6710
          break;
6711
 
6712
        case INDIRECT_REF:
6713
          *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
6714
          if (*expr_p != save_expr)
6715
            break;
6716
          /* else fall through.  */
6717
        case ALIGN_INDIRECT_REF:
6718
        case MISALIGNED_INDIRECT_REF:
6719
          ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6720
                               is_gimple_reg, fb_rvalue);
6721
          recalculate_side_effects (*expr_p);
6722
          break;
6723
 
6724
          /* Constants need not be gimplified.  */
6725
        case INTEGER_CST:
6726
        case REAL_CST:
6727
        case FIXED_CST:
6728
        case STRING_CST:
6729
        case COMPLEX_CST:
6730
        case VECTOR_CST:
6731
          ret = GS_ALL_DONE;
6732
          break;
6733
 
6734
        case CONST_DECL:
6735
          /* If we require an lvalue, such as for ADDR_EXPR, retain the
6736
             CONST_DECL node.  Otherwise the decl is replaceable by its
6737
             value.  */
6738
          /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either.  */
6739
          if (fallback & fb_lvalue)
6740
            ret = GS_ALL_DONE;
6741
          else
6742
            *expr_p = DECL_INITIAL (*expr_p);
6743
          break;
6744
 
6745
        case DECL_EXPR:
6746
          ret = gimplify_decl_expr (expr_p, pre_p);
6747
          break;
6748
 
6749
        case BIND_EXPR:
6750
          ret = gimplify_bind_expr (expr_p, pre_p);
6751
          break;
6752
 
6753
        case LOOP_EXPR:
6754
          ret = gimplify_loop_expr (expr_p, pre_p);
6755
          break;
6756
 
6757
        case SWITCH_EXPR:
6758
          ret = gimplify_switch_expr (expr_p, pre_p);
6759
          break;
6760
 
6761
        case EXIT_EXPR:
6762
          ret = gimplify_exit_expr (expr_p);
6763
          break;
6764
 
6765
        case GOTO_EXPR:
6766
          /* If the target is not a LABEL_DECL, then it is a computed jump
6767
             and the target needs to be gimplified.  */
6768
          if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
6769
            {
6770
              ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
6771
                                   NULL, is_gimple_val, fb_rvalue);
6772
              if (ret == GS_ERROR)
6773
                break;
6774
            }
6775
          gimplify_seq_add_stmt (pre_p,
6776
                          gimple_build_goto (GOTO_DESTINATION (*expr_p)));
6777
          break;
6778
 
6779
        case PREDICT_EXPR:
6780
          gimplify_seq_add_stmt (pre_p,
6781
                        gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
6782
                                              PREDICT_EXPR_OUTCOME (*expr_p)));
6783
          ret = GS_ALL_DONE;
6784
          break;
6785
 
6786
        case LABEL_EXPR:
6787
          ret = GS_ALL_DONE;
6788
          gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
6789
                      == current_function_decl);
6790
          gimplify_seq_add_stmt (pre_p,
6791
                          gimple_build_label (LABEL_EXPR_LABEL (*expr_p)));
6792
          break;
6793
 
6794
        case CASE_LABEL_EXPR:
6795
          ret = gimplify_case_label_expr (expr_p, pre_p);
6796
          break;
6797
 
6798
        case RETURN_EXPR:
6799
          ret = gimplify_return_expr (*expr_p, pre_p);
6800
          break;
6801
 
6802
        case CONSTRUCTOR:
6803
          /* Don't reduce this in place; let gimplify_init_constructor work its
6804
             magic.  But if we're just elaborating this for side effects,
6805
             gimplify any element that has side-effects.  */
6806
          if (fallback == fb_none)
6807
            {
6808
              unsigned HOST_WIDE_INT ix;
6809
              constructor_elt *ce;
6810
              tree temp = NULL_TREE;
6811
              for (ix = 0;
6812
                   VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (*expr_p),
6813
                                ix, ce);
6814
                   ix++)
6815
                if (TREE_SIDE_EFFECTS (ce->value))
6816
                  append_to_statement_list (ce->value, &temp);
6817
 
6818
              *expr_p = temp;
6819
              ret = GS_OK;
6820
            }
6821
          /* C99 code may assign to an array in a constructed
6822
             structure or union, and this has undefined behavior only
6823
             on execution, so create a temporary if an lvalue is
6824
             required.  */
6825
          else if (fallback == fb_lvalue)
6826
            {
6827
              *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
6828
              mark_addressable (*expr_p);
6829
            }
6830
          else
6831
            ret = GS_ALL_DONE;
6832
          break;
6833
 
6834
          /* The following are special cases that are not handled by the
6835
             original GIMPLE grammar.  */
6836
 
6837
          /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
6838
             eliminated.  */
6839
        case SAVE_EXPR:
6840
          ret = gimplify_save_expr (expr_p, pre_p, post_p);
6841
          break;
6842
 
6843
        case BIT_FIELD_REF:
6844
          {
6845
            enum gimplify_status r0, r1, r2;
6846
 
6847
            r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
6848
                                post_p, is_gimple_lvalue, fb_either);
6849
            r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
6850
                                post_p, is_gimple_val, fb_rvalue);
6851
            r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
6852
                                post_p, is_gimple_val, fb_rvalue);
6853
            recalculate_side_effects (*expr_p);
6854
 
6855
            ret = MIN (r0, MIN (r1, r2));
6856
          }
6857
          break;
6858
 
6859
        case TARGET_MEM_REF:
6860
          {
6861
            enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
6862
 
6863
            if (TMR_SYMBOL (*expr_p))
6864
              r0 = gimplify_expr (&TMR_SYMBOL (*expr_p), pre_p,
6865
                                  post_p, is_gimple_lvalue, fb_either);
6866
            else if (TMR_BASE (*expr_p))
6867
              r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
6868
                                  post_p, is_gimple_val, fb_either);
6869
            if (TMR_INDEX (*expr_p))
6870
              r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
6871
                                  post_p, is_gimple_val, fb_rvalue);
6872
            /* TMR_STEP and TMR_OFFSET are always integer constants.  */
6873
            ret = MIN (r0, r1);
6874
          }
6875
          break;
6876
 
6877
        case NON_LVALUE_EXPR:
6878
          /* This should have been stripped above.  */
6879
          gcc_unreachable ();
6880
 
6881
        case ASM_EXPR:
6882
          ret = gimplify_asm_expr (expr_p, pre_p, post_p);
6883
          break;
6884
 
6885
        case TRY_FINALLY_EXPR:
6886
        case TRY_CATCH_EXPR:
6887
          {
6888
            gimple_seq eval, cleanup;
6889
            gimple try_;
6890
 
6891
            eval = cleanup = NULL;
6892
            gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
6893
            gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
6894
            /* Don't create bogus GIMPLE_TRY with empty cleanup.  */
6895
            if (gimple_seq_empty_p (cleanup))
6896
              {
6897
                gimple_seq_add_seq (pre_p, eval);
6898
                ret = GS_ALL_DONE;
6899
                break;
6900
              }
6901
            try_ = gimple_build_try (eval, cleanup,
6902
                                     TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
6903
                                     ? GIMPLE_TRY_FINALLY
6904
                                     : GIMPLE_TRY_CATCH);
6905
            if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
6906
              gimple_try_set_catch_is_cleanup (try_,
6907
                                               TRY_CATCH_IS_CLEANUP (*expr_p));
6908
            gimplify_seq_add_stmt (pre_p, try_);
6909
            ret = GS_ALL_DONE;
6910
            break;
6911
          }
6912
 
6913
        case CLEANUP_POINT_EXPR:
6914
          ret = gimplify_cleanup_point_expr (expr_p, pre_p);
6915
          break;
6916
 
6917
        case TARGET_EXPR:
6918
          ret = gimplify_target_expr (expr_p, pre_p, post_p);
6919
          break;
6920
 
6921
        case CATCH_EXPR:
6922
          {
6923
            gimple c;
6924
            gimple_seq handler = NULL;
6925
            gimplify_and_add (CATCH_BODY (*expr_p), &handler);
6926
            c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
6927
            gimplify_seq_add_stmt (pre_p, c);
6928
            ret = GS_ALL_DONE;
6929
            break;
6930
          }
6931
 
6932
        case EH_FILTER_EXPR:
6933
          {
6934
            gimple ehf;
6935
            gimple_seq failure = NULL;
6936
 
6937
            gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
6938
            ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
6939
            gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
6940
            gimplify_seq_add_stmt (pre_p, ehf);
6941
            ret = GS_ALL_DONE;
6942
            break;
6943
          }
6944
 
6945
        case OBJ_TYPE_REF:
6946
          {
6947
            enum gimplify_status r0, r1;
6948
            r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
6949
                                post_p, is_gimple_val, fb_rvalue);
6950
            r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
6951
                                post_p, is_gimple_val, fb_rvalue);
6952
            TREE_SIDE_EFFECTS (*expr_p) = 0;
6953
            ret = MIN (r0, r1);
6954
          }
6955
          break;
6956
 
6957
        case LABEL_DECL:
6958
          /* We get here when taking the address of a label.  We mark
6959
             the label as "forced", meaning it can never be removed and
6960
             it is a potential target for any computed goto.  */
6961
          FORCED_LABEL (*expr_p) = 1;
6962
          ret = GS_ALL_DONE;
6963
          break;
6964
 
6965
        case STATEMENT_LIST:
6966
          ret = gimplify_statement_list (expr_p, pre_p);
6967
          break;
6968
 
6969
        case WITH_SIZE_EXPR:
6970
          {
6971
            gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
6972
                           post_p == &internal_post ? NULL : post_p,
6973
                           gimple_test_f, fallback);
6974
            gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
6975
                           is_gimple_val, fb_rvalue);
6976
          }
6977
          break;
6978
 
6979
        case VAR_DECL:
6980
        case PARM_DECL:
6981
          ret = gimplify_var_or_parm_decl (expr_p);
6982
          break;
6983
 
6984
        case RESULT_DECL:
6985
          /* When within an OpenMP context, notice uses of variables.  */
6986
          if (gimplify_omp_ctxp)
6987
            omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
6988
          ret = GS_ALL_DONE;
6989
          break;
6990
 
6991
        case SSA_NAME:
6992
          /* Allow callbacks into the gimplifier during optimization.  */
6993
          ret = GS_ALL_DONE;
6994
          break;
6995
 
6996
        case OMP_PARALLEL:
6997
          gimplify_omp_parallel (expr_p, pre_p);
6998
          ret = GS_ALL_DONE;
6999
          break;
7000
 
7001
        case OMP_TASK:
7002
          gimplify_omp_task (expr_p, pre_p);
7003
          ret = GS_ALL_DONE;
7004
          break;
7005
 
7006
        case OMP_FOR:
7007
          ret = gimplify_omp_for (expr_p, pre_p);
7008
          break;
7009
 
7010
        case OMP_SECTIONS:
7011
        case OMP_SINGLE:
7012
          gimplify_omp_workshare (expr_p, pre_p);
7013
          ret = GS_ALL_DONE;
7014
          break;
7015
 
7016
        case OMP_SECTION:
7017
        case OMP_MASTER:
7018
        case OMP_ORDERED:
7019
        case OMP_CRITICAL:
7020
          {
7021
            gimple_seq body = NULL;
7022
            gimple g;
7023
 
7024
            gimplify_and_add (OMP_BODY (*expr_p), &body);
7025
            switch (TREE_CODE (*expr_p))
7026
              {
7027
              case OMP_SECTION:
7028
                g = gimple_build_omp_section (body);
7029
                break;
7030
              case OMP_MASTER:
7031
                g = gimple_build_omp_master (body);
7032
                break;
7033
              case OMP_ORDERED:
7034
                g = gimple_build_omp_ordered (body);
7035
                break;
7036
              case OMP_CRITICAL:
7037
                g = gimple_build_omp_critical (body,
7038
                                               OMP_CRITICAL_NAME (*expr_p));
7039
                break;
7040
              default:
7041
                gcc_unreachable ();
7042
              }
7043
            gimplify_seq_add_stmt (pre_p, g);
7044
            ret = GS_ALL_DONE;
7045
            break;
7046
          }
7047
 
7048
        case OMP_ATOMIC:
7049
          ret = gimplify_omp_atomic (expr_p, pre_p);
7050
          break;
7051
 
7052
        case POINTER_PLUS_EXPR:
7053
          /* Convert ((type *)A)+offset into &A->field_of_type_and_offset.
7054
             The latter is a GIMPLE immediate, which saves the need for an extra statement.
7055
           */
7056
          if (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
7057
              && (tmp = maybe_fold_offset_to_address
7058
                  (EXPR_LOCATION (*expr_p),
7059
                   TREE_OPERAND (*expr_p, 0), TREE_OPERAND (*expr_p, 1),
7060
                   TREE_TYPE (*expr_p))))
7061
            {
7062
              *expr_p = tmp;
7063
              break;
7064
            }
7065
          /* Convert (void *)&a + 4 into (void *)&a[1].  */
7066
          if (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == NOP_EXPR
7067
              && TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
7068
              && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*expr_p,
7069
                                                                        0),0)))
7070
              && (tmp = maybe_fold_offset_to_address
7071
                  (EXPR_LOCATION (*expr_p),
7072
                   TREE_OPERAND (TREE_OPERAND (*expr_p, 0), 0),
7073
                   TREE_OPERAND (*expr_p, 1),
7074
                   TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*expr_p, 0),
7075
                                            0)))))
7076
             {
7077
               *expr_p = fold_convert (TREE_TYPE (*expr_p), tmp);
7078
               break;
7079
             }
7080
          /* FALLTHRU */
7081
 
7082
        default:
7083
          switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
7084
            {
7085
            case tcc_comparison:
7086
              /* Handle comparison of objects of non scalar mode aggregates
7087
                 with a call to memcmp.  It would be nice to only have to do
7088
                 this for variable-sized objects, but then we'd have to allow
7089
                 the same nest of reference nodes we allow for MODIFY_EXPR and
7090
                 that's too complex.
7091
 
7092
                 Compare scalar mode aggregates as scalar mode values.  Using
7093
                 memcmp for them would be very inefficient at best, and is
7094
                 plain wrong if bitfields are involved.  */
7095
                {
7096
                  tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
7097
 
7098
                  if (!AGGREGATE_TYPE_P (type))
7099
                    goto expr_2;
7100
                  else if (TYPE_MODE (type) != BLKmode)
7101
                    ret = gimplify_scalar_mode_aggregate_compare (expr_p);
7102
                  else
7103
                    ret = gimplify_variable_sized_compare (expr_p);
7104
 
7105
                  break;
7106
                }
7107
 
7108
            /* If *EXPR_P does not need to be special-cased, handle it
7109
               according to its class.  */
7110
            case tcc_unary:
7111
              ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7112
                                   post_p, is_gimple_val, fb_rvalue);
7113
              break;
7114
 
7115
            case tcc_binary:
7116
            expr_2:
7117
              {
7118
                enum gimplify_status r0, r1;
7119
 
7120
                r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7121
                                    post_p, is_gimple_val, fb_rvalue);
7122
                r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
7123
                                    post_p, is_gimple_val, fb_rvalue);
7124
 
7125
                ret = MIN (r0, r1);
7126
                break;
7127
              }
7128
 
7129
            case tcc_declaration:
7130
            case tcc_constant:
7131
              ret = GS_ALL_DONE;
7132
              goto dont_recalculate;
7133
 
7134
            default:
7135
              gcc_assert (TREE_CODE (*expr_p) == TRUTH_AND_EXPR
7136
                          || TREE_CODE (*expr_p) == TRUTH_OR_EXPR
7137
                          || TREE_CODE (*expr_p) == TRUTH_XOR_EXPR);
7138
              goto expr_2;
7139
            }
7140
 
7141
          recalculate_side_effects (*expr_p);
7142
 
7143
        dont_recalculate:
7144
          break;
7145
        }
7146
 
7147
      /* If we replaced *expr_p, gimplify again.  */
7148
      if (ret == GS_OK && (*expr_p == NULL || *expr_p == save_expr))
7149
        ret = GS_ALL_DONE;
7150
    }
7151
  while (ret == GS_OK);
7152
 
7153
  /* If we encountered an error_mark somewhere nested inside, either
7154
     stub out the statement or propagate the error back out.  */
7155
  if (ret == GS_ERROR)
7156
    {
7157
      if (is_statement)
7158
        *expr_p = NULL;
7159
      goto out;
7160
    }
7161
 
7162
  /* This was only valid as a return value from the langhook, which
7163
     we handled.  Make sure it doesn't escape from any other context.  */
7164
  gcc_assert (ret != GS_UNHANDLED);
7165
 
7166
  if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
7167
    {
7168
      /* We aren't looking for a value, and we don't have a valid
7169
         statement.  If it doesn't have side-effects, throw it away.  */
7170
      if (!TREE_SIDE_EFFECTS (*expr_p))
7171
        *expr_p = NULL;
7172
      else if (!TREE_THIS_VOLATILE (*expr_p))
7173
        {
7174
          /* This is probably a _REF that contains something nested that
7175
             has side effects.  Recurse through the operands to find it.  */
7176
          enum tree_code code = TREE_CODE (*expr_p);
7177
 
7178
          switch (code)
7179
            {
7180
            case COMPONENT_REF:
7181
            case REALPART_EXPR:
7182
            case IMAGPART_EXPR:
7183
            case VIEW_CONVERT_EXPR:
7184
              gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7185
                             gimple_test_f, fallback);
7186
              break;
7187
 
7188
            case ARRAY_REF:
7189
            case ARRAY_RANGE_REF:
7190
              gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7191
                             gimple_test_f, fallback);
7192
              gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
7193
                             gimple_test_f, fallback);
7194
              break;
7195
 
7196
            default:
7197
               /* Anything else with side-effects must be converted to
7198
                  a valid statement before we get here.  */
7199
              gcc_unreachable ();
7200
            }
7201
 
7202
          *expr_p = NULL;
7203
        }
7204
      else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
7205
               && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
7206
        {
7207
          /* Historically, the compiler has treated a bare reference
7208
             to a non-BLKmode volatile lvalue as forcing a load.  */
7209
          tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
7210
 
7211
          /* Normally, we do not want to create a temporary for a
7212
             TREE_ADDRESSABLE type because such a type should not be
7213
             copied by bitwise-assignment.  However, we make an
7214
             exception here, as all we are doing here is ensuring that
7215
             we read the bytes that make up the type.  We use
7216
             create_tmp_var_raw because create_tmp_var will abort when
7217
             given a TREE_ADDRESSABLE type.  */
7218
          tree tmp = create_tmp_var_raw (type, "vol");
7219
          gimple_add_tmp_var (tmp);
7220
          gimplify_assign (tmp, *expr_p, pre_p);
7221
          *expr_p = NULL;
7222
        }
7223
      else
7224
        /* We can't do anything useful with a volatile reference to
7225
           an incomplete type, so just throw it away.  Likewise for
7226
           a BLKmode type, since any implicit inner load should
7227
           already have been turned into an explicit one by the
7228
           gimplification process.  */
7229
        *expr_p = NULL;
7230
    }
7231
 
7232
  /* If we are gimplifying at the statement level, we're done.  Tack
7233
     everything together and return.  */
7234
  if (fallback == fb_none || is_statement)
7235
    {
7236
      /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
7237
         it out for GC to reclaim it.  */
7238
      *expr_p = NULL_TREE;
7239
 
7240
      if (!gimple_seq_empty_p (internal_pre)
7241
          || !gimple_seq_empty_p (internal_post))
7242
        {
7243
          gimplify_seq_add_seq (&internal_pre, internal_post);
7244
          gimplify_seq_add_seq (pre_p, internal_pre);
7245
        }
7246
 
7247
      /* The result of gimplifying *EXPR_P is going to be the last few
7248
         statements in *PRE_P and *POST_P.  Add location information
7249
         to all the statements that were added by the gimplification
7250
         helpers.  */
7251
      if (!gimple_seq_empty_p (*pre_p))
7252
        annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
7253
 
7254
      if (!gimple_seq_empty_p (*post_p))
7255
        annotate_all_with_location_after (*post_p, post_last_gsi,
7256
                                          input_location);
7257
 
7258
      goto out;
7259
    }
7260
 
7261
#ifdef ENABLE_GIMPLE_CHECKING
7262
  if (*expr_p)
7263
    {
7264
      enum tree_code code = TREE_CODE (*expr_p);
7265
      /* These expressions should already be in gimple IR form.  */
7266
      gcc_assert (code != MODIFY_EXPR
7267
                  && code != ASM_EXPR
7268
                  && code != BIND_EXPR
7269
                  && code != CATCH_EXPR
7270
                  && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
7271
                  && code != EH_FILTER_EXPR
7272
                  && code != GOTO_EXPR
7273
                  && code != LABEL_EXPR
7274
                  && code != LOOP_EXPR
7275
                  && code != SWITCH_EXPR
7276
                  && code != TRY_FINALLY_EXPR
7277
                  && code != OMP_CRITICAL
7278
                  && code != OMP_FOR
7279
                  && code != OMP_MASTER
7280
                  && code != OMP_ORDERED
7281
                  && code != OMP_PARALLEL
7282
                  && code != OMP_SECTIONS
7283
                  && code != OMP_SECTION
7284
                  && code != OMP_SINGLE);
7285
    }
7286
#endif
7287
 
7288
  /* Otherwise we're gimplifying a subexpression, so the resulting
7289
     value is interesting.  If it's a valid operand that matches
7290
     GIMPLE_TEST_F, we're done, unless we are handling some
7291
     post-effects internally; if that's the case, we need to copy into
7292
     a temporary before adding the post-effects to POST_P.  */
7293
  if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
7294
    goto out;
7295
 
7296
  /* Otherwise, we need to create a new temporary for the gimplified
7297
     expression.  */
7298
 
7299
  /* We can't return an lvalue if we have an internal postqueue.  The
7300
     object the lvalue refers to would (probably) be modified by the
7301
     postqueue; we need to copy the value out first, which means an
7302
     rvalue.  */
7303
  if ((fallback & fb_lvalue)
7304
      && gimple_seq_empty_p (internal_post)
7305
      && is_gimple_addressable (*expr_p))
7306
    {
7307
      /* An lvalue will do.  Take the address of the expression, store it
7308
         in a temporary, and replace the expression with an INDIRECT_REF of
7309
         that temporary.  */
7310
      tmp = build_fold_addr_expr_loc (input_location, *expr_p);
7311
      gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
7312
      *expr_p = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (tmp)), tmp);
7313
    }
7314
  else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
7315
    {
7316
      /* An rvalue will do.  Assign the gimplified expression into a
7317
         new temporary TMP and replace the original expression with
7318
         TMP.  First, make sure that the expression has a type so that
7319
         it can be assigned into a temporary.  */
7320
      gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
7321
 
7322
      if (!gimple_seq_empty_p (internal_post) || (fallback & fb_lvalue))
7323
        /* The postqueue might change the value of the expression between
7324
           the initialization and use of the temporary, so we can't use a
7325
           formal temp.  FIXME do we care?  */
7326
        {
7327
          *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
7328
          if (TREE_CODE (TREE_TYPE (*expr_p)) == COMPLEX_TYPE
7329
              || TREE_CODE (TREE_TYPE (*expr_p)) == VECTOR_TYPE)
7330
            DECL_GIMPLE_REG_P (*expr_p) = 1;
7331
        }
7332
      else
7333
        *expr_p = get_formal_tmp_var (*expr_p, pre_p);
7334
    }
7335
  else
7336
    {
7337
#ifdef ENABLE_GIMPLE_CHECKING
7338
      if (!(fallback & fb_mayfail))
7339
        {
7340
          fprintf (stderr, "gimplification failed:\n");
7341
          print_generic_expr (stderr, *expr_p, 0);
7342
          debug_tree (*expr_p);
7343
          internal_error ("gimplification failed");
7344
        }
7345
#endif
7346
      gcc_assert (fallback & fb_mayfail);
7347
 
7348
      /* If this is an asm statement, and the user asked for the
7349
         impossible, don't die.  Fail and let gimplify_asm_expr
7350
         issue an error.  */
7351
      ret = GS_ERROR;
7352
      goto out;
7353
    }
7354
 
7355
  /* Make sure the temporary matches our predicate.  */
7356
  gcc_assert ((*gimple_test_f) (*expr_p));
7357
 
7358
  if (!gimple_seq_empty_p (internal_post))
7359
    {
7360
      annotate_all_with_location (internal_post, input_location);
7361
      gimplify_seq_add_seq (pre_p, internal_post);
7362
    }
7363
 
7364
 out:
7365
  input_location = saved_location;
7366
  return ret;
7367
}
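
/* A minimal usage sketch for the fallback machinery above: reduce an
   operand to a GIMPLE value, allowing an rvalue temporary as a fallback.
   The example_* helper and its parameters are hypothetical; the call
   pattern itself mirrors ones used throughout this file.  */

static enum gimplify_status
example_gimplify_to_val (tree *op_p, gimple_seq *pre_p)
{
  /* is_gimple_val is the predicate the result must satisfy; fb_rvalue
     permits falling back to a fresh temporary when needed.  */
  return gimplify_expr (op_p, pre_p, NULL, is_gimple_val, fb_rvalue);
}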
7368
 
7369
/* Look through TYPE for variable-sized objects and gimplify each such
7370
   size that we find.  Add to LIST_P any statements generated.  */
7371
 
7372
void
7373
gimplify_type_sizes (tree type, gimple_seq *list_p)
7374
{
7375
  tree field, t;
7376
 
7377
  if (type == NULL || type == error_mark_node)
7378
    return;
7379
 
7380
  /* We first do the main variant, then copy into any other variants.  */
7381
  type = TYPE_MAIN_VARIANT (type);
7382
 
7383
  /* Avoid infinite recursion.  */
7384
  if (TYPE_SIZES_GIMPLIFIED (type))
7385
    return;
7386
 
7387
  TYPE_SIZES_GIMPLIFIED (type) = 1;
7388
 
7389
  switch (TREE_CODE (type))
7390
    {
7391
    case INTEGER_TYPE:
7392
    case ENUMERAL_TYPE:
7393
    case BOOLEAN_TYPE:
7394
    case REAL_TYPE:
7395
    case FIXED_POINT_TYPE:
7396
      gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
7397
      gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
7398
 
7399
      for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
7400
        {
7401
          TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
7402
          TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
7403
        }
7404
      break;
7405
 
7406
    case ARRAY_TYPE:
7407
      /* These types may not have declarations, so handle them here.  */
7408
      gimplify_type_sizes (TREE_TYPE (type), list_p);
7409
      gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
7410
      /* Ensure VLA bounds aren't removed: for -O0 they should be variables
7411
         with assigned stack slots; for -O1+ -g they should be tracked
7412
         by VTA.  */
7413
      if (TYPE_DOMAIN (type)
7414
          && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
7415
        {
7416
          t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
7417
          if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
7418
            DECL_IGNORED_P (t) = 0;
7419
          t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
7420
          if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
7421
            DECL_IGNORED_P (t) = 0;
7422
        }
7423
      break;
7424
 
7425
    case RECORD_TYPE:
7426
    case UNION_TYPE:
7427
    case QUAL_UNION_TYPE:
7428
      for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
7429
        if (TREE_CODE (field) == FIELD_DECL)
7430
          {
7431
            gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
7432
            gimplify_one_sizepos (&DECL_SIZE (field), list_p);
7433
            gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
7434
            gimplify_type_sizes (TREE_TYPE (field), list_p);
7435
          }
7436
      break;
7437
 
7438
    case POINTER_TYPE:
7439
    case REFERENCE_TYPE:
7440
        /* We used to recurse on the pointed-to type here, which turned out to
7441
           be incorrect because its definition might refer to variables not
7442
           yet initialized at this point if a forward declaration is involved.
7443
 
7444
           It was actually useful for anonymous pointed-to types to ensure
7445
           that the sizes evaluation dominates every possible later use of the
7446
           values.  Restricting to such types here would be safe since there
7447
           is no possible forward declaration around, but would introduce an
7448
           undesirable middle-end semantic to anonymity.  We then defer to
7449
           front-ends the responsibility of ensuring that the sizes are
7450
           evaluated both early and late enough, e.g. by attaching artificial
7451
           type declarations to the tree.  */
7452
      break;
7453
 
7454
    default:
7455
      break;
7456
    }
7457
 
7458
  gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
7459
  gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
7460
 
7461
  for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
7462
    {
7463
      TYPE_SIZE (t) = TYPE_SIZE (type);
7464
      TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
7465
      TYPE_SIZES_GIMPLIFIED (t) = 1;
7466
    }
7467
}
7468
 
7469
/* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
7470
   a size or position, has had all of its SAVE_EXPRs evaluated.
7471
   We add any required statements to *STMT_P.  */
7472
 
7473
void
7474
gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
7475
{
7476
  tree type, expr = *expr_p;
7477
 
7478
  /* We don't do anything if the value isn't there, is constant, or contains
7479
     a PLACEHOLDER_EXPR.  We also don't want to do anything if it's already
7480
     a VAR_DECL.  If it's a VAR_DECL from another function, the gimplifier
7481
     will want to replace it with a new variable, but that will cause problems
7482
     if this type is from outside the function.  It's OK to have that here.  */
7483
  if (expr == NULL_TREE || TREE_CONSTANT (expr)
7484
      || TREE_CODE (expr) == VAR_DECL
7485
      || CONTAINS_PLACEHOLDER_P (expr))
7486
    return;
7487
 
7488
  type = TREE_TYPE (expr);
7489
  *expr_p = unshare_expr (expr);
7490
 
7491
  gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue);
7492
  expr = *expr_p;
7493
 
7494
  /* Verify that we've an exact type match with the original expression.
7495
     In particular, we do not wish to drop a "sizetype" in favour of a
7496
     type of similar dimensions.  We don't want to pollute the generic
7497
     type-stripping code with this knowledge because it doesn't matter
7498
     for the bulk of GENERIC/GIMPLE.  It only matters that TYPE_SIZE_UNIT
7499
     and friends retain their "sizetype-ness".  */
7500
  if (TREE_TYPE (expr) != type
7501
      && TREE_CODE (type) == INTEGER_TYPE
7502
      && TYPE_IS_SIZETYPE (type))
7503
    {
7504
      tree tmp;
7505
      gimple stmt;
7506
 
7507
      *expr_p = create_tmp_var (type, NULL);
7508
      tmp = build1 (NOP_EXPR, type, expr);
7509
      stmt = gimplify_assign (*expr_p, tmp, stmt_p);
7510
      if (EXPR_HAS_LOCATION (expr))
7511
        gimple_set_location (stmt, EXPR_LOCATION (expr));
7512
      else
7513
        gimple_set_location (stmt, input_location);
7514
    }
7515
}
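
/* A minimal sketch of how the two helpers above are typically combined:
   gimplify every variable-sized piece of a declaration so that later
   statements can rely on the computed sizes.  The example_* helper and
   its DECL/PRE_P parameters are hypothetical.  */

static void
example_gimplify_decl_sizes (tree decl, gimple_seq *pre_p)
{
  /* Variable bounds and sizes buried inside the type itself.  */
  gimplify_type_sizes (TREE_TYPE (decl), pre_p);
  /* The size (in bits and in bytes) of the declaration proper.  */
  gimplify_one_sizepos (&DECL_SIZE (decl), pre_p);
  gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), pre_p);
}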
7516
 
7517
 
7518
/* Gimplify the body of statements pointed to by BODY_P and return a
7519
   GIMPLE_BIND containing the sequence of GIMPLE statements
7520
   corresponding to BODY_P.  FNDECL is the function decl containing
7521
   *BODY_P.  If DO_PARMS is true, the statements needed to resolve
   callee-copied parameters (see gimplify_parameters) are produced as
   well.  */
7522
 
7523
gimple
7524
gimplify_body (tree *body_p, tree fndecl, bool do_parms)
7525
{
7526
  location_t saved_location = input_location;
7527
  gimple_seq parm_stmts, seq;
7528
  gimple outer_bind;
7529
  struct gimplify_ctx gctx;
7530
 
7531
  timevar_push (TV_TREE_GIMPLIFY);
7532
 
7533
  /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
7534
     gimplification.  */
7535
  default_rtl_profile ();
7536
 
7537
  gcc_assert (gimplify_ctxp == NULL);
7538
  push_gimplify_context (&gctx);
7539
 
7540
  /* Unshare most shared trees in the body and in that of any nested functions.
7541
     It would seem we don't have to do this for nested functions because
7542
     they are supposed to be output and then the outer function gimplified
7543
     first, but the g++ front end doesn't always do it that way.  */
7544
  unshare_body (body_p, fndecl);
7545
  unvisit_body (body_p, fndecl);
7546
 
7547
  if (cgraph_node (fndecl)->origin)
7548
    nonlocal_vlas = pointer_set_create ();
7549
 
7550
  /* Make sure input_location isn't set to something weird.  */
7551
  input_location = DECL_SOURCE_LOCATION (fndecl);
7552
 
7553
  /* Resolve callee-copies.  This has to be done before processing
7554
     the body so that DECL_VALUE_EXPR gets processed correctly.  */
7555
  parm_stmts = (do_parms) ? gimplify_parameters () : NULL;
7556
 
7557
  /* Gimplify the function's body.  */
7558
  seq = NULL;
7559
  gimplify_stmt (body_p, &seq);
7560
  outer_bind = gimple_seq_first_stmt (seq);
7561
  if (!outer_bind)
7562
    {
7563
      outer_bind = gimple_build_nop ();
7564
      gimplify_seq_add_stmt (&seq, outer_bind);
7565
    }
7566
 
7567
  /* The body must contain exactly one statement, a GIMPLE_BIND.  If this is
7568
     not the case, wrap everything in a GIMPLE_BIND to make it so.  */
7569
  if (gimple_code (outer_bind) == GIMPLE_BIND
7570
      && gimple_seq_first (seq) == gimple_seq_last (seq))
7571
    ;
7572
  else
7573
    outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
7574
 
7575
  *body_p = NULL_TREE;
7576
 
7577
  /* If we had callee-copies statements, insert them at the beginning
7578
     of the function and clear DECL_VALUE_EXPR_P on the parameters.  */
7579
  if (!gimple_seq_empty_p (parm_stmts))
7580
    {
7581
      tree parm;
7582
 
7583
      gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
7584
      gimple_bind_set_body (outer_bind, parm_stmts);
7585
 
7586
      for (parm = DECL_ARGUMENTS (current_function_decl);
7587
           parm; parm = TREE_CHAIN (parm))
7588
        if (DECL_HAS_VALUE_EXPR_P (parm))
7589
          {
7590
            DECL_HAS_VALUE_EXPR_P (parm) = 0;
7591
            DECL_IGNORED_P (parm) = 0;
7592
          }
7593
    }
7594
 
7595
  if (nonlocal_vlas)
7596
    {
7597
      pointer_set_destroy (nonlocal_vlas);
7598
      nonlocal_vlas = NULL;
7599
    }
7600
 
7601
  pop_gimplify_context (outer_bind);
7602
  gcc_assert (gimplify_ctxp == NULL);
7603
 
7604
#ifdef ENABLE_TYPES_CHECKING
7605
  if (!errorcount && !sorrycount)
7606
    verify_types_in_gimple_seq (gimple_bind_body (outer_bind));
7607
#endif
7608
 
7609
  timevar_pop (TV_TREE_GIMPLIFY);
7610
  input_location = saved_location;
7611
 
7612
  return outer_bind;
7613
}
7614
 
7615
/* Entry point to the gimplification pass.  FNDECL is the FUNCTION_DECL
7616
   node for the function we want to gimplify.
7617
 
7618
   The resulting sequence of GIMPLE statements corresponding to the body
7619
   of FNDECL is stored as the function's GIMPLE body (see gimple_set_body).  */
7620
 
7621
void
7622
gimplify_function_tree (tree fndecl)
7623
{
7624
  tree oldfn, parm, ret;
7625
  gimple_seq seq;
7626
  gimple bind;
7627
 
7628
  gcc_assert (!gimple_body (fndecl));
7629
 
7630
  oldfn = current_function_decl;
7631
  current_function_decl = fndecl;
7632
  if (DECL_STRUCT_FUNCTION (fndecl))
7633
    push_cfun (DECL_STRUCT_FUNCTION (fndecl));
7634
  else
7635
    push_struct_function (fndecl);
7636
 
7637
  for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = TREE_CHAIN (parm))
7638
    {
7639
      /* Preliminarily mark non-addressed complex variables as eligible
7640
         for promotion to gimple registers.  We'll transform their uses
7641
         as we find them.  */
7642
      if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
7643
           || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
7644
          && !TREE_THIS_VOLATILE (parm)
7645
          && !needs_to_live_in_memory (parm))
7646
        DECL_GIMPLE_REG_P (parm) = 1;
7647
    }
7648
 
7649
  ret = DECL_RESULT (fndecl);
7650
  if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
7651
       || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
7652
      && !needs_to_live_in_memory (ret))
7653
    DECL_GIMPLE_REG_P (ret) = 1;
7654
 
7655
  bind = gimplify_body (&DECL_SAVED_TREE (fndecl), fndecl, true);
7656
 
7657
  /* The tree body of the function is no longer needed, replace it
7658
     with the new GIMPLE body.  */
7659
  seq = gimple_seq_alloc ();
7660
  gimple_seq_add_stmt (&seq, bind);
7661
  gimple_set_body (fndecl, seq);
7662
 
7663
  /* If we're instrumenting function entry/exit, then prepend the call to
7664
     the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
7665
     catch the exit hook.  */
7666
  /* ??? Add some way to ignore exceptions for this TFE.  */
7667
  if (flag_instrument_function_entry_exit
7668
      && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
7669
      && !flag_instrument_functions_exclude_p (fndecl))
7670
    {
7671
      tree x;
7672
      gimple new_bind;
7673
      gimple tf;
7674
      gimple_seq cleanup = NULL, body = NULL;
7675
 
7676
      x = implicit_built_in_decls[BUILT_IN_PROFILE_FUNC_EXIT];
7677
      gimplify_seq_add_stmt (&cleanup, gimple_build_call (x, 0));
7678
      tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
7679
 
7680
      x = implicit_built_in_decls[BUILT_IN_PROFILE_FUNC_ENTER];
7681
      gimplify_seq_add_stmt (&body, gimple_build_call (x, 0));
7682
      gimplify_seq_add_stmt (&body, tf);
7683
      new_bind = gimple_build_bind (NULL, body, gimple_bind_block (bind));
7684
      /* Clear the block for BIND, since it is no longer directly inside
7685
         the function, but within a try block.  */
7686
      gimple_bind_set_block (bind, NULL);
7687
 
7688
      /* Replace the current function body with the body
7689
         wrapped in the try/finally TF.  */
7690
      seq = gimple_seq_alloc ();
7691
      gimple_seq_add_stmt (&seq, new_bind);
7692
      gimple_set_body (fndecl, seq);
7693
    }
7694
 
7695
  DECL_SAVED_TREE (fndecl) = NULL_TREE;
7696
  cfun->curr_properties = PROP_gimple_any;
7697
 
7698
  current_function_decl = oldfn;
7699
  pop_cfun ();
7700
}
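
/* A minimal sketch of the call sequence a driver would use: lower one
   function and retrieve the GIMPLE sequence now attached to it.  The
   example_* helper is hypothetical and stands in for the actual caller,
   which lives outside this file.  */

static gimple_seq
example_lower_one_function (tree fndecl)
{
  gimplify_function_tree (fndecl);
  /* The GENERIC body has been replaced; fetch the GIMPLE body.  */
  return gimple_body (fndecl);
}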
7701
 
7702
 
7703
/* Some transformations like inlining may invalidate the GIMPLE form
7704
   for operands.  This function traverses all the operands in STMT and
7705
   gimplifies anything that is not a valid gimple operand.  Any new
7706
   GIMPLE statements are inserted before *GSI_P.  */
7707
 
7708
void
7709
gimple_regimplify_operands (gimple stmt, gimple_stmt_iterator *gsi_p)
7710
{
7711
  size_t i, num_ops;
7712
  tree orig_lhs = NULL_TREE, lhs, t;
7713
  gimple_seq pre = NULL;
7714
  gimple post_stmt = NULL;
7715
  struct gimplify_ctx gctx;
7716
 
7717
  push_gimplify_context (&gctx);
7718
  gimplify_ctxp->into_ssa = gimple_in_ssa_p (cfun);
7719
 
7720
  switch (gimple_code (stmt))
7721
    {
7722
    case GIMPLE_COND:
7723
      gimplify_expr (gimple_cond_lhs_ptr (stmt), &pre, NULL,
7724
                     is_gimple_val, fb_rvalue);
7725
      gimplify_expr (gimple_cond_rhs_ptr (stmt), &pre, NULL,
7726
                     is_gimple_val, fb_rvalue);
7727
      break;
7728
    case GIMPLE_SWITCH:
7729
      gimplify_expr (gimple_switch_index_ptr (stmt), &pre, NULL,
7730
                     is_gimple_val, fb_rvalue);
7731
      break;
7732
    case GIMPLE_OMP_ATOMIC_LOAD:
7733
      gimplify_expr (gimple_omp_atomic_load_rhs_ptr (stmt), &pre, NULL,
7734
                     is_gimple_val, fb_rvalue);
7735
      break;
7736
    case GIMPLE_ASM:
7737
      {
7738
        size_t i, noutputs = gimple_asm_noutputs (stmt);
7739
        const char *constraint, **oconstraints;
7740
        bool allows_mem, allows_reg, is_inout;
7741
 
7742
        oconstraints
7743
          = (const char **) alloca ((noutputs) * sizeof (const char *));
7744
        for (i = 0; i < noutputs; i++)
7745
          {
7746
            tree op = gimple_asm_output_op (stmt, i);
7747
            constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
7748
            oconstraints[i] = constraint;
7749
            parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
7750
                                     &allows_reg, &is_inout);
7751
            gimplify_expr (&TREE_VALUE (op), &pre, NULL,
7752
                           is_inout ? is_gimple_min_lval : is_gimple_lvalue,
7753
                           fb_lvalue | fb_mayfail);
7754
          }
7755
        for (i = 0; i < gimple_asm_ninputs (stmt); i++)
7756
          {
7757
            tree op = gimple_asm_input_op (stmt, i);
7758
            constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
7759
            parse_input_constraint (&constraint, 0, 0, noutputs, 0,
7760
                                    oconstraints, &allows_mem, &allows_reg);
7761
            if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (op))) && allows_mem)
7762
              allows_reg = 0;
7763
            if (!allows_reg && allows_mem)
7764
              gimplify_expr (&TREE_VALUE (op), &pre, NULL,
7765
                             is_gimple_lvalue, fb_lvalue | fb_mayfail);
7766
            else
7767
              gimplify_expr (&TREE_VALUE (op), &pre, NULL,
7768
                             is_gimple_asm_val, fb_rvalue);
7769
          }
7770
      }
7771
      break;
7772
    default:
7773
      /* NOTE: We start gimplifying operands from last to first to
7774
         make sure that side-effects on the RHS of calls, assignments
7775
         and ASMs are executed before the LHS.  The ordering is not
7776
         important for other statements.  */
7777
      num_ops = gimple_num_ops (stmt);
7778
      orig_lhs = gimple_get_lhs (stmt);
7779
      for (i = num_ops; i > 0; i--)
7780
        {
7781
          tree op = gimple_op (stmt, i - 1);
7782
          if (op == NULL_TREE)
7783
            continue;
7784
          if (i == 1 && (is_gimple_call (stmt) || is_gimple_assign (stmt)))
7785
            gimplify_expr (&op, &pre, NULL, is_gimple_lvalue, fb_lvalue);
7786
          else if (i == 2
7787
                   && is_gimple_assign (stmt)
7788
                   && num_ops == 2
7789
                   && get_gimple_rhs_class (gimple_expr_code (stmt))
7790
                      == GIMPLE_SINGLE_RHS)
7791
            gimplify_expr (&op, &pre, NULL,
7792
                           rhs_predicate_for (gimple_assign_lhs (stmt)),
7793
                           fb_rvalue);
7794
          else if (i == 2 && is_gimple_call (stmt))
7795
            {
7796
              if (TREE_CODE (op) == FUNCTION_DECL)
7797
                continue;
7798
              gimplify_expr (&op, &pre, NULL, is_gimple_call_addr, fb_rvalue);
7799
            }
7800
          else
7801
            gimplify_expr (&op, &pre, NULL, is_gimple_val, fb_rvalue);
7802
          gimple_set_op (stmt, i - 1, op);
7803
        }
7804
 
7805
      lhs = gimple_get_lhs (stmt);
7806
      /* If gimplifying the LHS changed it in a way that requires a simple
7807
         RHS, create a temporary.  */
7808
      if (lhs && !is_gimple_reg (lhs))
7809
        {
7810
          bool need_temp = false;
7811
 
7812
          if (is_gimple_assign (stmt)
7813
              && num_ops == 2
7814
              && get_gimple_rhs_class (gimple_expr_code (stmt))
7815
                 == GIMPLE_SINGLE_RHS)
7816
            gimplify_expr (gimple_assign_rhs1_ptr (stmt), &pre, NULL,
7817
                           rhs_predicate_for (gimple_assign_lhs (stmt)),
7818
                           fb_rvalue);
7819
          else if (is_gimple_reg (lhs))
7820
            {
7821
              if (is_gimple_reg_type (TREE_TYPE (lhs)))
7822
                {
7823
                  if (is_gimple_call (stmt))
7824
                    {
7825
                      i = gimple_call_flags (stmt);
7826
                      if ((i & ECF_LOOPING_CONST_OR_PURE)
7827
                          || !(i & (ECF_CONST | ECF_PURE)))
7828
                        need_temp = true;
7829
                    }
7830
                  if (stmt_can_throw_internal (stmt))
7831
                    need_temp = true;
7832
                }
7833
            }
7834
          else
7835
            {
7836
              if (is_gimple_reg_type (TREE_TYPE (lhs)))
7837
                need_temp = true;
7838
              else if (TYPE_MODE (TREE_TYPE (lhs)) != BLKmode)
7839
                {
7840
                  if (is_gimple_call (stmt))
7841
                    {
7842
                      tree fndecl = gimple_call_fndecl (stmt);
7843
 
7844
                      if (!aggregate_value_p (TREE_TYPE (lhs), fndecl)
7845
                          && !(fndecl && DECL_RESULT (fndecl)
7846
                               && DECL_BY_REFERENCE (DECL_RESULT (fndecl))))
7847
                        need_temp = true;
7848
                    }
7849
                  else
7850
                    need_temp = true;
7851
                }
7852
            }
7853
          if (need_temp)
7854
            {
7855
              tree temp = create_tmp_var (TREE_TYPE (lhs), NULL);
7856
 
7857
              if (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE
7858
                  || TREE_CODE (TREE_TYPE (lhs)) == VECTOR_TYPE)
7859
                DECL_GIMPLE_REG_P (temp) = 1;
7860
              if (TREE_CODE (orig_lhs) == SSA_NAME)
7861
                orig_lhs = SSA_NAME_VAR (orig_lhs);
7862
 
7863
              if (gimple_in_ssa_p (cfun))
7864
                temp = make_ssa_name (temp, NULL);
7865
              gimple_set_lhs (stmt, temp);
7866
              post_stmt = gimple_build_assign (lhs, temp);
7867
              if (TREE_CODE (lhs) == SSA_NAME)
7868
                SSA_NAME_DEF_STMT (lhs) = post_stmt;
7869
            }
7870
        }
7871
      break;
7872
    }
7873
 
7874
  if (gimple_referenced_vars (cfun))
7875
    for (t = gimplify_ctxp->temps; t ; t = TREE_CHAIN (t))
7876
      add_referenced_var (t);
7877
 
7878
  if (!gimple_seq_empty_p (pre))
7879
    {
7880
      if (gimple_in_ssa_p (cfun))
7881
        {
7882
          gimple_stmt_iterator i;
7883
 
7884
          for (i = gsi_start (pre); !gsi_end_p (i); gsi_next (&i))
7885
            mark_symbols_for_renaming (gsi_stmt (i));
7886
        }
7887
      gsi_insert_seq_before (gsi_p, pre, GSI_SAME_STMT);
7888
    }
7889
  if (post_stmt)
7890
    gsi_insert_after (gsi_p, post_stmt, GSI_NEW_STMT);
7891
 
7892
  pop_gimplify_context (NULL);
7893
}
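
/* A minimal sketch of the intended use: after a transformation (here,
   folding) may have left non-GIMPLE operands in a statement, re-gimplify
   them in place.  The example_* helper is hypothetical, and fold_stmt is
   assumed as the folding entry point for the example.  */

static void
example_fold_and_regimplify (gimple_stmt_iterator *gsi)
{
  if (fold_stmt (gsi))
    gimple_regimplify_operands (gsi_stmt (*gsi), gsi);
}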
7894
 
7895
 
7896
/* Expands EXPR to a list of gimple statements STMTS.  If SIMPLE is true,
7897
   force the result to be either an ssa_name or an invariant, otherwise
7898
   just force it to be a rhs expression.  If VAR is not NULL, make the
7899
   base variable of the final destination be VAR if suitable.  */
7900
 
7901
tree
7902
force_gimple_operand (tree expr, gimple_seq *stmts, bool simple, tree var)
7903
{
7904
  tree t;
7905
  enum gimplify_status ret;
7906
  gimple_predicate gimple_test_f;
7907
  struct gimplify_ctx gctx;
7908
 
7909
  *stmts = NULL;
7910
 
7911
  if (is_gimple_val (expr))
7912
    return expr;
7913
 
7914
  gimple_test_f = simple ? is_gimple_val : is_gimple_reg_rhs;
7915
 
7916
  push_gimplify_context (&gctx);
7917
  gimplify_ctxp->into_ssa = gimple_in_ssa_p (cfun);
7918
  gimplify_ctxp->allow_rhs_cond_expr = true;
7919
 
7920
  if (var)
7921
    expr = build2 (MODIFY_EXPR, TREE_TYPE (var), var, expr);
7922
 
7923
  if (TREE_CODE (expr) != MODIFY_EXPR
7924
      && TREE_TYPE (expr) == void_type_node)
7925
    {
7926
      gimplify_and_add (expr, stmts);
7927
      expr = NULL_TREE;
7928
    }
7929
  else
7930
    {
7931
      ret = gimplify_expr (&expr, stmts, NULL, gimple_test_f, fb_rvalue);
7932
      gcc_assert (ret != GS_ERROR);
7933
    }
7934
 
7935
  if (gimple_referenced_vars (cfun))
7936
    for (t = gimplify_ctxp->temps; t ; t = TREE_CHAIN (t))
7937
      add_referenced_var (t);
7938
 
7939
  pop_gimplify_context (NULL);
7940
 
7941
  return expr;
7942
}
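
/* A minimal sketch of a common call pattern for the function above:
   force EXPR to an SSA name or invariant and append any statements it
   needed to *PRE_P.  The example_* helper and its PRE_P parameter are
   hypothetical.  */

static tree
example_force_to_value (tree expr, gimple_seq *pre_p)
{
  gimple_seq stmts = NULL;
  tree val = force_gimple_operand (expr, &stmts, true, NULL_TREE);

  if (!gimple_seq_empty_p (stmts))
    gimplify_seq_add_seq (pre_p, stmts);
  return val;
}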
7943
 
7944
/* Invokes force_gimple_operand for EXPR with parameters SIMPLE_P and VAR.  If
7945
   some statements are produced, emits them at GSI.  If BEFORE is true,
7946
   the statements are inserted before GSI, otherwise they are inserted after
7947
   it.  M specifies the way GSI moves after insertion (GSI_SAME_STMT or
7948
   GSI_CONTINUE_LINKING are the usual values).  */
7949
 
7950
tree
7951
force_gimple_operand_gsi (gimple_stmt_iterator *gsi, tree expr,
7952
                          bool simple_p, tree var, bool before,
7953
                          enum gsi_iterator_update m)
7954
{
7955
  gimple_seq stmts;
7956
 
7957
  expr = force_gimple_operand (expr, &stmts, simple_p, var);
7958
 
7959
  if (!gimple_seq_empty_p (stmts))
7960
    {
7961
      if (gimple_in_ssa_p (cfun))
7962
        {
7963
          gimple_stmt_iterator i;
7964
 
7965
          for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
7966
            mark_symbols_for_renaming (gsi_stmt (i));
7967
        }
7968
 
7969
      if (before)
7970
        gsi_insert_seq_before (gsi, stmts, m);
7971
      else
7972
        gsi_insert_seq_after (gsi, stmts, m);
7973
    }
7974
 
7975
  return expr;
7976
}
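
/* A minimal sketch of the variant above: build A + B in GENERIC form and
   legalize it into a GIMPLE value right before the statement at *GSI.
   The example_* helper and its A/B parameters are hypothetical; A and B
   are assumed to share a type.  */

static tree
example_insert_sum_before (gimple_stmt_iterator *gsi, tree a, tree b)
{
  tree sum = fold_build2 (PLUS_EXPR, TREE_TYPE (a), a, b);

  return force_gimple_operand_gsi (gsi, sum, true, NULL_TREE,
                                   true, GSI_SAME_STMT);
}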
7977
 
7978
#include "gt-gimplify.h"
