OpenCores
URL: https://opencores.org/ocsvn/openrisc_me/openrisc_me/trunk

Subversion repository: openrisc_me
File: openrisc/trunk/gnu-src/gcc-4.5.1/gcc/cp/cp-gimplify.c (rev 297)

/* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.

   Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Contributed by Jason Merrill <jason@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "cp-tree.h"
#include "c-common.h"
#include "toplev.h"
#include "tree-iterator.h"
#include "gimple.h"
#include "hashtab.h"
#include "pointer-set.h"
#include "flags.h"

/* Local declarations.  */

enum bc_t { bc_break = 0, bc_continue = 1 };

/* Stack of labels which are targets for "break" or "continue",
   linked through TREE_CHAIN.  */
static tree bc_label[2];

/* Begin a scope which can be exited by a break or continue statement.  BC
   indicates which.

   Just creates a label and pushes it into the current context.  */

static tree
begin_bc_block (enum bc_t bc)
{
  tree label = create_artificial_label (input_location);
  TREE_CHAIN (label) = bc_label[bc];
  bc_label[bc] = label;
  return label;
}

/* Finish a scope which can be exited by a break or continue statement.
   LABEL was returned from the most recent call to begin_bc_block.  BODY is
   an expression for the contents of the scope.

   If we saw a break (or continue) in the scope, append a LABEL_EXPR to
   body.  Otherwise, just forget the label.  */

static gimple_seq
finish_bc_block (enum bc_t bc, tree label, gimple_seq body)
{
  gcc_assert (label == bc_label[bc]);

  if (TREE_USED (label))
    {
      gimple_seq_add_stmt (&body, gimple_build_label (label));
    }

  bc_label[bc] = TREE_CHAIN (label);
  TREE_CHAIN (label) = NULL_TREE;
  return body;
}

/* Get the LABEL_EXPR to represent a break or continue statement
   in the current block scope.  BC indicates which.  */

static tree
get_bc_label (enum bc_t bc)
{
  tree label = bc_label[bc];

  if (label == NULL_TREE)
    {
      if (bc == bc_break)
        error ("break statement not within loop or switch");
      else
        error ("continue statement not within loop or switch");

      return NULL_TREE;
    }

  /* Mark the label used for finish_bc_block.  */
  TREE_USED (label) = 1;
  return label;
}

/* Genericize a TRY_BLOCK.  */

static void
genericize_try_block (tree *stmt_p)
{
  tree body = TRY_STMTS (*stmt_p);
  tree cleanup = TRY_HANDLERS (*stmt_p);

  *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
}

/* Genericize a HANDLER by converting to a CATCH_EXPR.  */

static void
genericize_catch_block (tree *stmt_p)
{
  tree type = HANDLER_TYPE (*stmt_p);
  tree body = HANDLER_BODY (*stmt_p);

  /* FIXME should the caught type go in TREE_TYPE?  */
  *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
}

/* A terser interface for building a representation of an exception
   specification.  */

static tree
build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
{
  tree t;

  /* FIXME should the allowed types go in TREE_TYPE?  */
  t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
  append_to_statement_list (failure, &EH_FILTER_FAILURE (t));

  t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
  append_to_statement_list (body, &TREE_OPERAND (t, 0));

  return t;
}

/* Genericize an EH_SPEC_BLOCK by converting it to a
   TRY_CATCH_EXPR/EH_FILTER_EXPR pair.  */

static void
genericize_eh_spec_block (tree *stmt_p)
{
  tree body = EH_SPEC_STMTS (*stmt_p);
  tree allowed = EH_SPEC_RAISES (*stmt_p);
  tree failure = build_call_n (call_unexpected_node, 1, build_exc_ptr ());

  *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
  TREE_NO_WARNING (*stmt_p) = true;
  TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true;
}

/* Genericize an IF_STMT by turning it into a COND_EXPR.  */

static void
genericize_if_stmt (tree *stmt_p)
{
  tree stmt, cond, then_, else_;
  location_t locus = EXPR_LOCATION (*stmt_p);

  stmt = *stmt_p;
  cond = IF_COND (stmt);
  then_ = THEN_CLAUSE (stmt);
  else_ = ELSE_CLAUSE (stmt);

  if (!then_)
    then_ = build_empty_stmt (locus);
  if (!else_)
    else_ = build_empty_stmt (locus);

  if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
    stmt = then_;
  else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
    stmt = else_;
  else
    stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
  if (CAN_HAVE_LOCATION_P (stmt) && !EXPR_HAS_LOCATION (stmt))
    SET_EXPR_LOCATION (stmt, locus);
  *stmt_p = stmt;
}

/* Build a generic representation of one of the C loop forms.  COND is the
   loop condition or NULL_TREE.  BODY is the (possibly compound) statement
   controlled by the loop.  INCR is the increment expression of a for-loop,
   or NULL_TREE.  COND_IS_FIRST indicates whether the condition is
   evaluated before the loop body as in while and for loops, or after the
   loop body as in do-while loops.  */

static gimple_seq
gimplify_cp_loop (tree cond, tree body, tree incr, bool cond_is_first)
{
  gimple top, entry, stmt;
  gimple_seq stmt_list, body_seq, incr_seq, exit_seq;
  tree cont_block, break_block;
  location_t stmt_locus;

  stmt_locus = input_location;
  stmt_list = NULL;
  body_seq = NULL;
  incr_seq = NULL;
  exit_seq = NULL;
  entry = NULL;

  break_block = begin_bc_block (bc_break);
  cont_block = begin_bc_block (bc_continue);

  /* If condition is zero don't generate a loop construct.  */
  if (cond && integer_zerop (cond))
    {
      top = NULL;
      if (cond_is_first)
        {
          stmt = gimple_build_goto (get_bc_label (bc_break));
          gimple_set_location (stmt, stmt_locus);
          gimple_seq_add_stmt (&stmt_list, stmt);
        }
    }
  else
    {
      /* If we use a LOOP_EXPR here, we have to feed the whole thing
         back through the main gimplifier to lower it.  Given that we
         have to gimplify the loop body NOW so that we can resolve
         break/continue stmts, seems easier to just expand to gotos.  */
      top = gimple_build_label (create_artificial_label (stmt_locus));

      /* If we have an exit condition, then we build an IF with gotos either
         out of the loop, or to the top of it.  If there's no exit condition,
         then we just build a jump back to the top.  */
      if (cond && !integer_nonzerop (cond))
        {
          if (cond != error_mark_node)
            {
              gimplify_expr (&cond, &exit_seq, NULL, is_gimple_val, fb_rvalue);
              stmt = gimple_build_cond (NE_EXPR, cond,
                                        build_int_cst (TREE_TYPE (cond), 0),
                                        gimple_label_label (top),
                                        get_bc_label (bc_break));
              gimple_seq_add_stmt (&exit_seq, stmt);
            }

          if (cond_is_first)
            {
              if (incr)
                {
                  entry = gimple_build_label
                    (create_artificial_label (stmt_locus));
                  stmt = gimple_build_goto (gimple_label_label (entry));
                }
              else
                stmt = gimple_build_goto (get_bc_label (bc_continue));
              gimple_set_location (stmt, stmt_locus);
              gimple_seq_add_stmt (&stmt_list, stmt);
            }
        }
      else
        {
          stmt = gimple_build_goto (gimple_label_label (top));
          gimple_seq_add_stmt (&exit_seq, stmt);
        }
    }

  gimplify_stmt (&body, &body_seq);
  gimplify_stmt (&incr, &incr_seq);

  body_seq = finish_bc_block (bc_continue, cont_block, body_seq);

  gimple_seq_add_stmt (&stmt_list, top);
  gimple_seq_add_seq (&stmt_list, body_seq);
  gimple_seq_add_seq (&stmt_list, incr_seq);
  gimple_seq_add_stmt (&stmt_list, entry);
  gimple_seq_add_seq (&stmt_list, exit_seq);

  annotate_all_with_location (stmt_list, stmt_locus);

  return finish_bc_block (bc_break, break_block, stmt_list);
}

/* Gimplify a FOR_STMT node.  Move the stuff in the for-init-stmt into the
   prequeue and hand off to gimplify_cp_loop.  */

static void
gimplify_for_stmt (tree *stmt_p, gimple_seq *pre_p)
{
  tree stmt = *stmt_p;

  if (FOR_INIT_STMT (stmt))
    gimplify_and_add (FOR_INIT_STMT (stmt), pre_p);

  gimple_seq_add_seq (pre_p,
                      gimplify_cp_loop (FOR_COND (stmt), FOR_BODY (stmt),
                                        FOR_EXPR (stmt), 1));
  *stmt_p = NULL_TREE;
}

/* Gimplify a WHILE_STMT node.  */

static void
gimplify_while_stmt (tree *stmt_p, gimple_seq *pre_p)
{
  tree stmt = *stmt_p;
  gimple_seq_add_seq (pre_p,
                      gimplify_cp_loop (WHILE_COND (stmt), WHILE_BODY (stmt),
                                        NULL_TREE, 1));
  *stmt_p = NULL_TREE;
}

/* Gimplify a DO_STMT node.  */

static void
gimplify_do_stmt (tree *stmt_p, gimple_seq *pre_p)
{
  tree stmt = *stmt_p;
  gimple_seq_add_seq (pre_p,
                      gimplify_cp_loop (DO_COND (stmt), DO_BODY (stmt),
                                        NULL_TREE, 0));
  *stmt_p = NULL_TREE;
}

/* Genericize a SWITCH_STMT by turning it into a SWITCH_EXPR.  */

static void
gimplify_switch_stmt (tree *stmt_p, gimple_seq *pre_p)
{
  tree stmt = *stmt_p;
  tree break_block, body, t;
  location_t stmt_locus = input_location;
  gimple_seq seq = NULL;

  break_block = begin_bc_block (bc_break);

  body = SWITCH_STMT_BODY (stmt);
  if (!body)
    body = build_empty_stmt (stmt_locus);

  t = build3 (SWITCH_EXPR, SWITCH_STMT_TYPE (stmt),
              SWITCH_STMT_COND (stmt), body, NULL_TREE);
  SET_EXPR_LOCATION (t, stmt_locus);
  gimplify_and_add (t, &seq);

  seq = finish_bc_block (bc_break, break_block, seq);
  gimple_seq_add_seq (pre_p, seq);
  *stmt_p = NULL_TREE;
}

/* Hook into the middle of gimplifying an OMP_FOR node.  This is required
   in order to properly gimplify CONTINUE statements.  Here we merely
   manage the continue stack; the rest of the job is performed by the
   regular gimplifier.  */

static enum gimplify_status
cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt = *expr_p;
  tree cont_block;
  gimple stmt;
  gimple_seq seq = NULL;

  /* Protect ourselves from recursion.  */
  if (OMP_FOR_GIMPLIFYING_P (for_stmt))
    return GS_UNHANDLED;
  OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;

  /* Note that while technically the continue label is enabled too soon
     here, we should have already diagnosed invalid continues nested within
     statement expressions within the INIT, COND, or INCR expressions.  */
  cont_block = begin_bc_block (bc_continue);

  gimplify_and_add (for_stmt, &seq);
  stmt = gimple_seq_last_stmt (seq);
  if (gimple_code (stmt) == GIMPLE_OMP_FOR)
    gimple_omp_set_body (stmt, finish_bc_block (bc_continue, cont_block,
                                                gimple_omp_body (stmt)));
  else
    seq = finish_bc_block (bc_continue, cont_block, seq);
  gimple_seq_add_seq (pre_p, seq);

  OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;

  return GS_ALL_DONE;
}

/*  Gimplify an EXPR_STMT node.  */

static void
gimplify_expr_stmt (tree *stmt_p)
{
  tree stmt = EXPR_STMT_EXPR (*stmt_p);

  if (stmt == error_mark_node)
    stmt = NULL;

  /* Gimplification of a statement expression will nullify the
     statement if all its side effects are moved to *PRE_P and *POST_P.

     In this case we will not want to emit the gimplified statement.
     However, we may still want to emit a warning, so we do that before
     gimplification.  */
  if (stmt && warn_unused_value)
    {
      if (!TREE_SIDE_EFFECTS (stmt))
        {
          if (!IS_EMPTY_STMT (stmt)
              && !VOID_TYPE_P (TREE_TYPE (stmt))
              && !TREE_NO_WARNING (stmt))
            warning (OPT_Wunused_value, "statement with no effect");
        }
      else
        warn_if_unused_value (stmt, input_location);
    }

  if (stmt == NULL_TREE)
    stmt = alloc_stmt_list ();

  *stmt_p = stmt;
}

/* Gimplify initialization from an AGGR_INIT_EXPR.  */

static void
cp_gimplify_init_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree from = TREE_OPERAND (*expr_p, 1);
  tree to = TREE_OPERAND (*expr_p, 0);
  tree t;

  /* What about code that pulls out the temp and uses it elsewhere?  I
     think that such code never uses the TARGET_EXPR as an initializer.  If
     I'm wrong, we'll abort because the temp won't have any RTL.  In that
     case, I guess we'll need to replace references somehow.  */
  if (TREE_CODE (from) == TARGET_EXPR)
    from = TARGET_EXPR_INITIAL (from);

  /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
     inside the TARGET_EXPR.  */
  for (t = from; t; )
    {
      tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;

      /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
         replace the slot operand with our target.

         Should we add a target parm to gimplify_expr instead?  No, as in this
         case we want to replace the INIT_EXPR.  */
      if (TREE_CODE (sub) == AGGR_INIT_EXPR
          || TREE_CODE (sub) == VEC_INIT_EXPR)
        {
          gimplify_expr (&to, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
          if (TREE_CODE (sub) == AGGR_INIT_EXPR)
            AGGR_INIT_EXPR_SLOT (sub) = to;
          else
            VEC_INIT_EXPR_SLOT (sub) = to;
          *expr_p = from;

          /* The initialization is now a side-effect, so the container can
             become void.  */
          if (from != sub)
            TREE_TYPE (from) = void_type_node;
        }

      if (t == sub)
        break;
      else
        t = TREE_OPERAND (t, 1);
    }

}

/* Gimplify a MUST_NOT_THROW_EXPR.  */

static enum gimplify_status
gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree stmt = *expr_p;
  tree temp = voidify_wrapper_expr (stmt, NULL);
  tree body = TREE_OPERAND (stmt, 0);
  gimple_seq try_ = NULL;
  gimple_seq catch_ = NULL;
  gimple mnt;

  gimplify_and_add (body, &try_);
  mnt = gimple_build_eh_must_not_throw (terminate_node);
  gimplify_seq_add_stmt (&catch_, mnt);
  mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);

  gimplify_seq_add_stmt (pre_p, mnt);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL;
  return GS_ALL_DONE;
}

/* Do C++-specific gimplification.  Args are as for gimplify_expr.  */

int
cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  int saved_stmts_are_full_exprs_p = 0;
  enum tree_code code = TREE_CODE (*expr_p);
  enum gimplify_status ret;

  if (STATEMENT_CODE_P (code))
    {
      saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
      current_stmt_tree ()->stmts_are_full_exprs_p
        = STMT_IS_FULL_EXPR_P (*expr_p);
    }

  switch (code)
    {
    case PTRMEM_CST:
      *expr_p = cplus_expand_constant (*expr_p);
      ret = GS_OK;
      break;

    case AGGR_INIT_EXPR:
      simplify_aggr_init_expr (expr_p);
      ret = GS_OK;
      break;

    case VEC_INIT_EXPR:
      {
        location_t loc = input_location;
        gcc_assert (EXPR_HAS_LOCATION (*expr_p));
        input_location = EXPR_LOCATION (*expr_p);
        *expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
                                  VEC_INIT_EXPR_INIT (*expr_p), false, 1,
                                  tf_warning_or_error);
        ret = GS_OK;
        input_location = loc;
      }
      break;

    case THROW_EXPR:
      /* FIXME communicate throw type to back end, probably by moving
         THROW_EXPR into ../tree.def.  */
      *expr_p = TREE_OPERAND (*expr_p, 0);
      ret = GS_OK;
      break;

    case MUST_NOT_THROW_EXPR:
      ret = gimplify_must_not_throw_expr (expr_p, pre_p);
      break;

      /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
         LHS of an assignment might also be involved in the RHS, as in bug
         25979.  */
    case INIT_EXPR:
      cp_gimplify_init_expr (expr_p, pre_p, post_p);
      if (TREE_CODE (*expr_p) != INIT_EXPR)
        return GS_OK;
      /* Otherwise fall through.  */
    case MODIFY_EXPR:
      {
        /* If the back end isn't clever enough to know that the lhs and rhs
           types are the same, add an explicit conversion.  */
        tree op0 = TREE_OPERAND (*expr_p, 0);
        tree op1 = TREE_OPERAND (*expr_p, 1);

        if (!error_operand_p (op0)
            && !error_operand_p (op1)
            && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
                || TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
            && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
          TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
                                              TREE_TYPE (op0), op1);

        else if ((rhs_predicate_for (op0)) (op1)
                 && !(TREE_CODE (op1) == CALL_EXPR
                      && CALL_EXPR_RETURN_SLOT_OPT (op1))
                 && is_really_empty_class (TREE_TYPE (op0)))
          {
            /* Remove any copies of empty classes.  We check that the RHS
               has a simple form so that TARGET_EXPRs and CONSTRUCTORs get
               reduced properly, and we leave the return slot optimization
               alone because it isn't a copy.

               Also drop volatile variables on the RHS to avoid infinite
               recursion from gimplify_expr trying to load the value.  */
            if (!TREE_SIDE_EFFECTS (op1)
                || (DECL_P (op1) && TREE_THIS_VOLATILE (op1)))
              *expr_p = op0;
            else
              *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
                                op0, op1);
          }
      }
      ret = GS_OK;
      break;

    case EMPTY_CLASS_EXPR:
      /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
      *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
      ret = GS_OK;
      break;

    case BASELINK:
      *expr_p = BASELINK_FUNCTIONS (*expr_p);
      ret = GS_OK;
      break;

    case TRY_BLOCK:
      genericize_try_block (expr_p);
      ret = GS_OK;
      break;

    case HANDLER:
      genericize_catch_block (expr_p);
      ret = GS_OK;
      break;

    case EH_SPEC_BLOCK:
      genericize_eh_spec_block (expr_p);
      ret = GS_OK;
      break;

    case USING_STMT:
      gcc_unreachable ();

    case FOR_STMT:
      gimplify_for_stmt (expr_p, pre_p);
      ret = GS_OK;
      break;

    case WHILE_STMT:
      gimplify_while_stmt (expr_p, pre_p);
      ret = GS_OK;
      break;

    case DO_STMT:
      gimplify_do_stmt (expr_p, pre_p);
      ret = GS_OK;
      break;

    case SWITCH_STMT:
      gimplify_switch_stmt (expr_p, pre_p);
      ret = GS_OK;
      break;

    case OMP_FOR:
      ret = cp_gimplify_omp_for (expr_p, pre_p);
      break;

    case CONTINUE_STMT:
      gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_CONTINUE, NOT_TAKEN));
      gimple_seq_add_stmt (pre_p, gimple_build_goto (get_bc_label (bc_continue)));
      *expr_p = NULL_TREE;
      ret = GS_ALL_DONE;
      break;

    case BREAK_STMT:
      gimple_seq_add_stmt (pre_p, gimple_build_goto (get_bc_label (bc_break)));
      *expr_p = NULL_TREE;
      ret = GS_ALL_DONE;
      break;

    case EXPR_STMT:
      gimplify_expr_stmt (expr_p);
      ret = GS_OK;
      break;

    case UNARY_PLUS_EXPR:
      {
        tree arg = TREE_OPERAND (*expr_p, 0);
        tree type = TREE_TYPE (*expr_p);
        *expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
                                            : arg;
        ret = GS_OK;
      }
      break;

    default:
      ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
      break;
    }

  /* Restore saved state.  */
  if (STATEMENT_CODE_P (code))
    current_stmt_tree ()->stmts_are_full_exprs_p
      = saved_stmts_are_full_exprs_p;

  return ret;
}

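/* Descriptive comment added for clarity (not in the original source):
   return true if T is a PARM_DECL or RESULT_DECL that is passed by
   invisible reference, i.e. has DECL_BY_REFERENCE set.  */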
static inline bool
is_invisiref_parm (const_tree t)
{
  return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
          && DECL_BY_REFERENCE (t));
}

/* Return true if the UIDs in both int tree maps are equal.  */

int
cxx_int_tree_map_eq (const void *va, const void *vb)
{
  const struct cxx_int_tree_map *a = (const struct cxx_int_tree_map *) va;
  const struct cxx_int_tree_map *b = (const struct cxx_int_tree_map *) vb;
  return (a->uid == b->uid);
}

/* Hash a UID in a cxx_int_tree_map.  */

unsigned int
cxx_int_tree_map_hash (const void *item)
{
  return ((const struct cxx_int_tree_map *)item)->uid;
}

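/* Descriptive comment added for clarity (not in the original source):
   data shared by cp_genericize_r while walking a function body -- the set
   of trees already visited and the stack of enclosing BIND_EXPRs.  */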
struct cp_genericize_data
{
  struct pointer_set_t *p_set;
  VEC (tree, heap) *bind_expr_stack;
};

/* Perform any pre-gimplification lowering of C++ front end trees to
   GENERIC.  */

static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
  struct pointer_set_t *p_set = wtd->p_set;

  if (is_invisiref_parm (stmt)
      /* Don't dereference parms in a thunk, pass the references through. */
      && !(DECL_THUNK_P (current_function_decl)
           && TREE_CODE (stmt) == PARM_DECL))
    {
      *stmt_p = convert_from_reference (stmt);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (cp_function_chain->extern_decl_map
      && (TREE_CODE (stmt) == FUNCTION_DECL || TREE_CODE (stmt) == VAR_DECL)
      && DECL_EXTERNAL (stmt))
    {
      struct cxx_int_tree_map *h, in;
      in.uid = DECL_UID (stmt);
      h = (struct cxx_int_tree_map *)
          htab_find_with_hash (cp_function_chain->extern_decl_map,
                               &in, in.uid);
      if (h)
        {
          *stmt_p = h->to;
          *walk_subtrees = 0;
          return NULL;
        }
    }

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (pointer_set_contains (p_set, stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  if (TREE_CODE (stmt) == ADDR_EXPR
      && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    {
      *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (stmt) == RETURN_EXPR
           && TREE_OPERAND (stmt, 0)
           && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR.  */
    *walk_subtrees = 0;
  else if (TREE_CODE (stmt) == OMP_CLAUSE)
    switch (OMP_CLAUSE_CODE (stmt))
      {
      case OMP_CLAUSE_LASTPRIVATE:
        /* Don't dereference an invisiref in OpenMP clauses.  */
        if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
          {
            *walk_subtrees = 0;
            if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
              cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
                            cp_genericize_r, data, NULL);
          }
        break;
      case OMP_CLAUSE_PRIVATE:
      case OMP_CLAUSE_SHARED:
      case OMP_CLAUSE_FIRSTPRIVATE:
      case OMP_CLAUSE_COPYIN:
      case OMP_CLAUSE_COPYPRIVATE:
        /* Don't dereference an invisiref in OpenMP clauses.  */
        if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
          *walk_subtrees = 0;
        break;
      case OMP_CLAUSE_REDUCTION:
        gcc_assert (!is_invisiref_parm (OMP_CLAUSE_DECL (stmt)));
        break;
      default:
        break;
      }
  else if (IS_TYPE_OR_DECL_P (stmt))
    *walk_subtrees = 0;

  /* Due to the way voidify_wrapper_expr is written, we don't get a chance
     to lower this construct before scanning it, so we need to lower these
     before doing anything else.  */
  else if (TREE_CODE (stmt) == CLEANUP_STMT)
    *stmt_p = build2 (CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
                                             : TRY_FINALLY_EXPR,
                      void_type_node,
                      CLEANUP_BODY (stmt),
                      CLEANUP_EXPR (stmt));

  else if (TREE_CODE (stmt) == IF_STMT)
    {
      genericize_if_stmt (stmt_p);
      /* *stmt_p has changed, tail recurse to handle it again.  */
      return cp_genericize_r (stmt_p, walk_subtrees, data);
    }

  /* COND_EXPR might have incompatible types in branches if one or both
     arms are bitfields.  Fix it up now.  */
  else if (TREE_CODE (stmt) == COND_EXPR)
    {
      tree type_left
        = (TREE_OPERAND (stmt, 1)
           ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
           : NULL_TREE);
      tree type_right
        = (TREE_OPERAND (stmt, 2)
           ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
           : NULL_TREE);
      if (type_left
          && !useless_type_conversion_p (TREE_TYPE (stmt),
                                         TREE_TYPE (TREE_OPERAND (stmt, 1))))
        {
          TREE_OPERAND (stmt, 1)
            = fold_convert (type_left, TREE_OPERAND (stmt, 1));
          gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
                                                 type_left));
        }
      if (type_right
          && !useless_type_conversion_p (TREE_TYPE (stmt),
                                         TREE_TYPE (TREE_OPERAND (stmt, 2))))
        {
          TREE_OPERAND (stmt, 2)
            = fold_convert (type_right, TREE_OPERAND (stmt, 2));
          gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
                                                 type_right));
        }
    }

  else if (TREE_CODE (stmt) == BIND_EXPR)
    {
      VEC_safe_push (tree, heap, wtd->bind_expr_stack, stmt);
      cp_walk_tree (&BIND_EXPR_BODY (stmt),
                    cp_genericize_r, data, NULL);
      VEC_pop (tree, wtd->bind_expr_stack);
    }

  else if (TREE_CODE (stmt) == USING_STMT)
    {
      tree block = NULL_TREE;

      /* Get the innermost enclosing GIMPLE_BIND that has a non-NULL
         BLOCK, and append an IMPORTED_DECL to its
         BLOCK_VARS chained list.  */
      if (wtd->bind_expr_stack)
        {
          int i;
          for (i = VEC_length (tree, wtd->bind_expr_stack) - 1; i >= 0; i--)
            if ((block = BIND_EXPR_BLOCK (VEC_index (tree,
                                                     wtd->bind_expr_stack, i))))
              break;
        }
      if (block)
        {
          tree using_directive;
          gcc_assert (TREE_OPERAND (stmt, 0));

          using_directive = make_node (IMPORTED_DECL);
          TREE_TYPE (using_directive) = void_type_node;

          IMPORTED_DECL_ASSOCIATED_DECL (using_directive)
            = TREE_OPERAND (stmt, 0);
          TREE_CHAIN (using_directive) = BLOCK_VARS (block);
          BLOCK_VARS (block) = using_directive;
        }
      /* The USING_STMT won't appear in GENERIC.  */
      *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
      *walk_subtrees = 0;
    }

  else if (TREE_CODE (stmt) == DECL_EXPR
           && TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
    {
      /* Using decls inside DECL_EXPRs are just dropped on the floor.  */
      *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
      *walk_subtrees = 0;
    }

  pointer_set_insert (p_set, *stmt_p);

  return NULL;
}

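/* Descriptive comment added for clarity (not in the original source):
   lower the body of FNDECL to GENERIC.  Fix up parameters and the return
   value that are passed by invisible reference, walk the saved tree with
   cp_genericize_r, and then hand the function to c_genericize.  */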
void
cp_genericize (tree fndecl)
{
  tree t;
  struct cp_genericize_data wtd;

  /* Fix up the types of parms passed by invisible reference.  */
  for (t = DECL_ARGUMENTS (fndecl); t; t = TREE_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_TYPE (t)))
      {
        /* If a function's arguments are copied to create a thunk,
           then DECL_BY_REFERENCE will be set -- but the type of the
           argument will be a pointer type, so we will never get
           here.  */
        gcc_assert (!DECL_BY_REFERENCE (t));
        gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
        TREE_TYPE (t) = DECL_ARG_TYPE (t);
        DECL_BY_REFERENCE (t) = 1;
        TREE_ADDRESSABLE (t) = 0;
        relayout_decl (t);
      }

  /* Do the same for the return value.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
    {
      t = DECL_RESULT (fndecl);
      TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
      DECL_BY_REFERENCE (t) = 1;
      TREE_ADDRESSABLE (t) = 0;
      relayout_decl (t);
    }

  /* If we're a clone, the body is already GIMPLE.  */
  if (DECL_CLONED_FUNCTION_P (fndecl))
    return;

  /* We do want to see every occurrence of the parms, so we can't just use
     walk_tree's hash functionality.  */
  wtd.p_set = pointer_set_create ();
  wtd.bind_expr_stack = NULL;
  cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_genericize_r, &wtd, NULL);
  pointer_set_destroy (wtd.p_set);
  VEC_free (tree, heap, wtd.bind_expr_stack);

  /* Do everything else.  */
  c_genericize (fndecl);

  gcc_assert (bc_label[bc_break] == NULL);
  gcc_assert (bc_label[bc_continue] == NULL);
}

/* Build code to apply FN to each member of ARG1 and ARG2.  FN may be
   NULL if there is in fact nothing to do.  ARG2 may be null if FN
   actually only takes one argument.  */

static tree
cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
{
  tree defparm, parm, t;
  int i = 0;
  int nargs;
  tree *argarray;

  if (fn == NULL)
    return NULL;

  nargs = list_length (DECL_ARGUMENTS (fn));
  argarray = (tree *) alloca (nargs * sizeof (tree));

  defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
  if (arg2)
    defparm = TREE_CHAIN (defparm);

  if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
    {
      tree inner_type = TREE_TYPE (arg1);
      tree start1, end1, p1;
      tree start2 = NULL, p2 = NULL;
      tree ret = NULL, lab;

      start1 = arg1;
      start2 = arg2;
      do
        {
          inner_type = TREE_TYPE (inner_type);
          start1 = build4 (ARRAY_REF, inner_type, start1,
                           size_zero_node, NULL, NULL);
          if (arg2)
            start2 = build4 (ARRAY_REF, inner_type, start2,
                             size_zero_node, NULL, NULL);
        }
      while (TREE_CODE (inner_type) == ARRAY_TYPE);
      start1 = build_fold_addr_expr_loc (input_location, start1);
      if (arg2)
        start2 = build_fold_addr_expr_loc (input_location, start2);

      end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
      end1 = build2 (POINTER_PLUS_EXPR, TREE_TYPE (start1), start1, end1);

      p1 = create_tmp_var (TREE_TYPE (start1), NULL);
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
      append_to_statement_list (t, &ret);

      if (arg2)
        {
          p2 = create_tmp_var (TREE_TYPE (start2), NULL);
          t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
          append_to_statement_list (t, &ret);
        }

      lab = create_artificial_label (input_location);
      t = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (t, &ret);

      argarray[i++] = p1;
      if (arg2)
        argarray[i++] = p2;
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
           parm = TREE_CHAIN (parm), i++)
        argarray[i] = convert_default_arg (TREE_VALUE (parm),
                                           TREE_PURPOSE (parm), fn, i);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
      append_to_statement_list (t, &ret);

      t = TYPE_SIZE_UNIT (inner_type);
      t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (p1), p1, t);
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
      append_to_statement_list (t, &ret);

      if (arg2)
        {
          t = TYPE_SIZE_UNIT (inner_type);
          t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (p2), p2, t);
          t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
          append_to_statement_list (t, &ret);
        }

      t = build2 (NE_EXPR, boolean_type_node, p1, end1);
      t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
      append_to_statement_list (t, &ret);

      return ret;
    }
  else
    {
      argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
      if (arg2)
        argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
           parm = TREE_CHAIN (parm), i++)
        argarray[i] = convert_default_arg (TREE_VALUE (parm),
                                           TREE_PURPOSE (parm),
                                           fn, i);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
    }
}

/* Return code to initialize DECL with its default constructor, or
   NULL if there's nothing to do.  */

tree
cxx_omp_clause_default_ctor (tree clause, tree decl,
                             tree outer ATTRIBUTE_UNUSED)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);

  return ret;
}

/* Return code to initialize DST with a copy constructor from SRC.  */

tree
cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}

/* Similarly, except use an assignment operator instead.  */

tree
cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}

/* Return code to destroy DECL.  */

tree
cxx_omp_clause_dtor (tree clause, tree decl)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);

  return ret;
}

/* True if OpenMP should privatize what this DECL points to rather
   than the DECL itself.  */

bool
cxx_omp_privatize_by_reference (const_tree decl)
{
  return is_invisiref_parm (decl);
}

/* True if OpenMP sharing attribute of DECL is predetermined.  */

enum omp_clause_default_kind
cxx_omp_predetermined_sharing (tree decl)
{
  tree type;

  /* Static data members are predetermined as shared.  */
  if (TREE_STATIC (decl))
    {
      tree ctx = CP_DECL_CONTEXT (decl);
      if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
        return OMP_CLAUSE_DEFAULT_SHARED;
    }

  type = TREE_TYPE (decl);
  if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      if (!is_invisiref_parm (decl))
        return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
      type = TREE_TYPE (type);

      if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
        {
          /* NVR doesn't preserve const qualification of the
             variable's type.  */
          tree outer = outer_curly_brace_block (current_function_decl);
          tree var;

          if (outer)
            for (var = BLOCK_VARS (outer); var; var = TREE_CHAIN (var))
              if (DECL_NAME (decl) == DECL_NAME (var)
                  && (TYPE_MAIN_VARIANT (type)
                      == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
                {
                  if (TYPE_READONLY (TREE_TYPE (var)))
                    type = TREE_TYPE (var);
                  break;
                }
        }
    }

  if (type == error_mark_node)
    return OMP_CLAUSE_DEFAULT_UNSPECIFIED;

  /* Variables with const-qualified type having no mutable member
     are predetermined shared.  */
  if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
    return OMP_CLAUSE_DEFAULT_SHARED;

  return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
}

/* Finalize an implicitly determined clause.  */

void
cxx_omp_finish_clause (tree c)
{
  tree decl, inner_type;
  bool make_shared = false;

  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
    return;

  decl = OMP_CLAUSE_DECL (c);
  decl = require_complete_type (decl);
  inner_type = TREE_TYPE (decl);
  if (decl == error_mark_node)
    make_shared = true;
  else if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
    {
      if (is_invisiref_parm (decl))
        inner_type = TREE_TYPE (inner_type);
      else
        {
          error ("%qE implicitly determined as %<firstprivate%> has reference type",
                 decl);
          make_shared = true;
        }
    }

  /* We're interested in the base element, not arrays.  */
  while (TREE_CODE (inner_type) == ARRAY_TYPE)
    inner_type = TREE_TYPE (inner_type);

  /* Check for special function availability by building a call to one.
     Save the results, because later we won't be in the right context
     for making these queries.  */
  if (!make_shared
      && CLASS_TYPE_P (inner_type)
      && cxx_omp_create_clause_info (c, inner_type, false, true, false))
    make_shared = true;

  if (make_shared)
    OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
}
