/* Statement translation -- generate GCC trees from gfc_code.
   Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Paul Brook <paul@nowt.org>
   and Steven Bosscher <s.bosscher@student.tudelft.nl>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "gimple.h"
#include "ggc.h"
#include "toplev.h"
#include "real.h"
#include "gfortran.h"
#include "flags.h"
#include "trans.h"
#include "trans-stmt.h"
#include "trans-types.h"
#include "trans-array.h"
#include "trans-const.h"
#include "arith.h"
#include "dependency.h"

typedef struct iter_info
{
  tree var;
  tree start;
  tree end;
  tree step;
  struct iter_info *next;
}
iter_info;

typedef struct forall_info
{
  iter_info *this_loop;
  tree mask;
  tree maskindex;
  int nvar;
  tree size;
  struct forall_info  *prev_nest;
}
forall_info;

static void gfc_trans_where_2 (gfc_code *, tree, bool,
                               forall_info *, stmtblock_t *);

/* Translate a F95 label number to a LABEL_EXPR.  */

tree
gfc_trans_label_here (gfc_code * code)
{
  return build1_v (LABEL_EXPR, gfc_get_label_decl (code->here));
}


/* Given a variable expression which has been ASSIGNed to, find the decl
   containing the auxiliary variables.  For variables in common blocks this
   is a field_decl.  */

void
gfc_conv_label_variable (gfc_se * se, gfc_expr * expr)
{
  gcc_assert (expr->symtree->n.sym->attr.assign == 1);
  gfc_conv_expr (se, expr);
  /* Deals with variable in common block. Get the field declaration.  */
  if (TREE_CODE (se->expr) == COMPONENT_REF)
    se->expr = TREE_OPERAND (se->expr, 1);
  /* Deals with dummy argument. Get the parameter declaration.  */
  else if (TREE_CODE (se->expr) == INDIRECT_REF)
    se->expr = TREE_OPERAND (se->expr, 0);
}

/* Translate a label assignment statement.  */

tree
gfc_trans_label_assign (gfc_code * code)
{
  tree label_tree;
  gfc_se se;
  tree len;
  tree addr;
  tree len_tree;
  int label_len;

  /* Start a new block.  */
  gfc_init_se (&se, NULL);
  gfc_start_block (&se.pre);
  gfc_conv_label_variable (&se, code->expr1);

  len = GFC_DECL_STRING_LEN (se.expr);
  addr = GFC_DECL_ASSIGN_ADDR (se.expr);

  label_tree = gfc_get_label_decl (code->label1);

  if (code->label1->defined == ST_LABEL_TARGET)
    {
      label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
      len_tree = integer_minus_one_node;
    }
  else
    {
      gfc_expr *format = code->label1->format;

      label_len = format->value.character.length;
      len_tree = build_int_cst (NULL_TREE, label_len);
      label_tree = gfc_build_wide_string_const (format->ts.kind, label_len + 1,
                                                format->value.character.string);
      label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
    }

  gfc_add_modify (&se.pre, len, len_tree);
  gfc_add_modify (&se.pre, addr, label_tree);

  return gfc_finish_block (&se.pre);
}

/* Translate a GOTO statement.  */

tree
gfc_trans_goto (gfc_code * code)
{
  locus loc = code->loc;
  tree assigned_goto;
  tree target;
  tree tmp;
  gfc_se se;

  if (code->label1 != NULL)
    return build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));

  /* ASSIGNED GOTO.  */
  gfc_init_se (&se, NULL);
  gfc_start_block (&se.pre);
  gfc_conv_label_variable (&se, code->expr1);
  tmp = GFC_DECL_STRING_LEN (se.expr);
  tmp = fold_build2 (NE_EXPR, boolean_type_node, tmp,
                     build_int_cst (TREE_TYPE (tmp), -1));
  gfc_trans_runtime_check (true, false, tmp, &se.pre, &loc,
                           "Assigned label is not a target label");

  assigned_goto = GFC_DECL_ASSIGN_ADDR (se.expr);

  /* We're going to ignore a label list.  It does not really change the
     statement's semantics (because it is just a further restriction on
     what's legal code); before, we were comparing label addresses here, but
     that's a very fragile business and may break with optimization.  So
     just ignore it.  */

  target = fold_build1 (GOTO_EXPR, void_type_node, assigned_goto);
  gfc_add_expr_to_block (&se.pre, target);
  return gfc_finish_block (&se.pre);
}
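
/* Worked example (illustrative only; the label numbers and the variable
   name are arbitrary).  For the obsolescent Fortran sequence

       ASSIGN 100 TO IVAR
       GOTO IVAR, (100, 200)

   gfc_trans_label_assign stores the address of label 100 in IVAR's
   auxiliary address variable and sets the auxiliary length to -1 (marking
   a branch target rather than a FORMAT label), and gfc_trans_goto checks
   that stored length before emitting a computed GOTO_EXPR on the stored
   address; the (100, 200) list is ignored, as explained above.  */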


/* Translate an ENTRY statement.  Just adds a label for this entry point.  */
tree
gfc_trans_entry (gfc_code * code)
{
  return build1_v (LABEL_EXPR, code->ext.entry->label);
}


/* Check for dependencies between INTENT(IN) and INTENT(OUT) arguments of
   elemental subroutines.  Make temporaries for output arguments if any such
   dependencies are found.  Output arguments are chosen because internal_unpack
   can be used, as is, to copy the result back to the variable.  */
static void
gfc_conv_elemental_dependencies (gfc_se * se, gfc_se * loopse,
                                 gfc_symbol * sym, gfc_actual_arglist * arg,
                                 gfc_dep_check check_variable)
{
  gfc_actual_arglist *arg0;
  gfc_expr *e;
  gfc_formal_arglist *formal;
  gfc_loopinfo tmp_loop;
  gfc_se parmse;
  gfc_ss *ss;
  gfc_ss_info *info;
  gfc_symbol *fsym;
  gfc_ref *ref;
  int n;
  tree data;
  tree offset;
  tree size;
  tree tmp;

  if (loopse->ss == NULL)
    return;

  ss = loopse->ss;
  arg0 = arg;
  formal = sym->formal;

  /* Loop over all the arguments testing for dependencies.  */
  for (; arg != NULL; arg = arg->next, formal = formal ? formal->next : NULL)
    {
      e = arg->expr;
      if (e == NULL)
        continue;

      /* Obtain the info structure for the current argument.  */
      info = NULL;
      for (ss = loopse->ss; ss && ss != gfc_ss_terminator; ss = ss->next)
        {
          if (ss->expr != e)
            continue;
          info = &ss->data.info;
          break;
        }

      /* If there is a dependency, create a temporary and use it
         instead of the variable.  */
      fsym = formal ? formal->sym : NULL;
      if (e->expr_type == EXPR_VARIABLE
            && e->rank && fsym
            && fsym->attr.intent != INTENT_IN
            && gfc_check_fncall_dependency (e, fsym->attr.intent,
                                            sym, arg0, check_variable))
        {
          tree initial, temptype;
          stmtblock_t temp_post;

          /* Make a local loopinfo for the temporary creation, so that
             none of the other ss->info's have to be renormalized.  */
          gfc_init_loopinfo (&tmp_loop);
          for (n = 0; n < info->dimen; n++)
            {
              tmp_loop.to[n] = loopse->loop->to[n];
              tmp_loop.from[n] = loopse->loop->from[n];
              tmp_loop.order[n] = loopse->loop->order[n];
            }

          /* Obtain the argument descriptor for unpacking.  */
          gfc_init_se (&parmse, NULL);
          parmse.want_pointer = 1;

          /* The scalarizer introduces some specific peculiarities when
             handling elemental subroutines; the stride can be needed up to
             the dim_array - 1, rather than dim_loop - 1 to calculate
             offsets outside the loop.  For this reason, we make sure that
             the descriptor has the dimensionality of the array by converting
             trailing elements into ranges with end = start.  */
          for (ref = e->ref; ref; ref = ref->next)
            if (ref->type == REF_ARRAY && ref->u.ar.type == AR_SECTION)
              break;

          if (ref)
            {
              bool seen_range = false;
              for (n = 0; n < ref->u.ar.dimen; n++)
                {
                  if (ref->u.ar.dimen_type[n] == DIMEN_RANGE)
                    seen_range = true;

                  if (!seen_range
                        || ref->u.ar.dimen_type[n] != DIMEN_ELEMENT)
                    continue;

                  ref->u.ar.end[n] = gfc_copy_expr (ref->u.ar.start[n]);
                  ref->u.ar.dimen_type[n] = DIMEN_RANGE;
                }
            }

          gfc_conv_expr_descriptor (&parmse, e, gfc_walk_expr (e));
          gfc_add_block_to_block (&se->pre, &parmse.pre);

          /* If we've got INTENT(INOUT) or a derived type with INTENT(OUT),
             initialize the array temporary with a copy of the values.  */
          if (fsym->attr.intent == INTENT_INOUT
                || (fsym->ts.type == BT_DERIVED
                      && fsym->attr.intent == INTENT_OUT))
            initial = parmse.expr;
          else
            initial = NULL_TREE;

          /* Find the type of the temporary to create; we don't use the type
             of e itself as this breaks for subcomponent-references in e (where
             the type of e is that of the final reference, but parmse.expr's
             type corresponds to the full derived-type).  */
          /* TODO: Fix this somehow so we don't need a temporary of the whole
             array but instead only the components referenced.  */
          temptype = TREE_TYPE (parmse.expr); /* Pointer to descriptor.  */
          gcc_assert (TREE_CODE (temptype) == POINTER_TYPE);
          temptype = TREE_TYPE (temptype);
          temptype = gfc_get_element_type (temptype);

          /* Generate the temporary.  Cleaning up the temporary should be the
             very last thing done, so we add the code to a new block and add it
             to se->post as last instructions.  */
          size = gfc_create_var (gfc_array_index_type, NULL);
          data = gfc_create_var (pvoid_type_node, NULL);
          gfc_init_block (&temp_post);
          tmp = gfc_trans_create_temp_array (&se->pre, &temp_post,
                                             &tmp_loop, info, temptype,
                                             initial,
                                             false, true, false,
                                             &arg->expr->where);
          gfc_add_modify (&se->pre, size, tmp);
          tmp = fold_convert (pvoid_type_node, info->data);
          gfc_add_modify (&se->pre, data, tmp);

          /* Calculate the offset for the temporary.  */
          offset = gfc_index_zero_node;
          for (n = 0; n < info->dimen; n++)
            {
              tmp = gfc_conv_descriptor_stride_get (info->descriptor,
                                                    gfc_rank_cst[n]);
              tmp = fold_build2 (MULT_EXPR, gfc_array_index_type,
                                 loopse->loop->from[n], tmp);
              offset = fold_build2 (MINUS_EXPR, gfc_array_index_type,
                                    offset, tmp);
            }
          info->offset = gfc_create_var (gfc_array_index_type, NULL);
          gfc_add_modify (&se->pre, info->offset, offset);

          /* Copy the result back using unpack.  */
          tmp = build_call_expr_loc (input_location,
                                 gfor_fndecl_in_unpack, 2, parmse.expr, data);
          gfc_add_expr_to_block (&se->post, tmp);

          /* parmse.pre is already added above.  */
          gfc_add_block_to_block (&se->post, &parmse.post);
          gfc_add_block_to_block (&se->post, &temp_post);
        }
    }
}


/* Translate the CALL statement.  Builds a call to an F95 subroutine.  */

tree
gfc_trans_call (gfc_code * code, bool dependency_check,
                tree mask, tree count1, bool invert)
{
  gfc_se se;
  gfc_ss * ss;
  int has_alternate_specifier;
  gfc_dep_check check_variable;
  tree index = NULL_TREE;
  tree maskexpr = NULL_TREE;
  tree tmp;

  /* A CALL starts a new block because the actual arguments may have to
     be evaluated first.  */
  gfc_init_se (&se, NULL);
  gfc_start_block (&se.pre);

  gcc_assert (code->resolved_sym);

  ss = gfc_ss_terminator;
  if (code->resolved_sym->attr.elemental)
    ss = gfc_walk_elemental_function_args (ss, code->ext.actual, GFC_SS_REFERENCE);

  /* This is not an elemental subroutine call with array valued arguments.  */
  if (ss == gfc_ss_terminator)
    {

      /* Translate the call.  */
      has_alternate_specifier
        = gfc_conv_procedure_call (&se, code->resolved_sym, code->ext.actual,
                                  code->expr1, NULL_TREE);

      /* A subroutine without side-effect, by definition, does nothing!  */
      TREE_SIDE_EFFECTS (se.expr) = 1;

      /* Chain the pieces together and return the block.  */
      if (has_alternate_specifier)
        {
          gfc_code *select_code;
          gfc_symbol *sym;
          select_code = code->next;
          gcc_assert(select_code->op == EXEC_SELECT);
          sym = select_code->expr1->symtree->n.sym;
          se.expr = convert (gfc_typenode_for_spec (&sym->ts), se.expr);
          if (sym->backend_decl == NULL)
            sym->backend_decl = gfc_get_symbol_decl (sym);
          gfc_add_modify (&se.pre, sym->backend_decl, se.expr);
        }
      else
        gfc_add_expr_to_block (&se.pre, se.expr);

      gfc_add_block_to_block (&se.pre, &se.post);
    }

  else
    {
      /* An elemental subroutine call with array valued arguments has
         to be scalarized.  */
      gfc_loopinfo loop;
      stmtblock_t body;
      stmtblock_t block;
      gfc_se loopse;
      gfc_se depse;

      /* gfc_walk_elemental_function_args renders the ss chain in the
         reverse order to the actual argument order.  */
      ss = gfc_reverse_ss (ss);

      /* Initialize the loop.  */
      gfc_init_se (&loopse, NULL);
      gfc_init_loopinfo (&loop);
      gfc_add_ss_to_loop (&loop, ss);

      gfc_conv_ss_startstride (&loop);
      /* TODO: gfc_conv_loop_setup generates a temporary for vector
         subscripts.  This could be prevented in the elemental case
         as temporaries are handled separately
         (below in gfc_conv_elemental_dependencies).  */
      gfc_conv_loop_setup (&loop, &code->expr1->where);
      gfc_mark_ss_chain_used (ss, 1);

      /* Convert the arguments, checking for dependencies.  */
      gfc_copy_loopinfo_to_se (&loopse, &loop);
      loopse.ss = ss;

      /* For operator assignment, do dependency checking.  */
      if (dependency_check)
        check_variable = ELEM_CHECK_VARIABLE;
      else
        check_variable = ELEM_DONT_CHECK_VARIABLE;

      gfc_init_se (&depse, NULL);
      gfc_conv_elemental_dependencies (&depse, &loopse, code->resolved_sym,
                                       code->ext.actual, check_variable);

      gfc_add_block_to_block (&loop.pre,  &depse.pre);
      gfc_add_block_to_block (&loop.post, &depse.post);

      /* Generate the loop body.  */
      gfc_start_scalarized_body (&loop, &body);
      gfc_init_block (&block);

      if (mask && count1)
        {
          /* Form the mask expression according to the mask.  */
          index = count1;
          maskexpr = gfc_build_array_ref (mask, index, NULL);
          if (invert)
            maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr),
                                    maskexpr);
        }

      /* Add the subroutine call to the block.  */
      gfc_conv_procedure_call (&loopse, code->resolved_sym,
                               code->ext.actual, code->expr1,
                               NULL_TREE);

      if (mask && count1)
        {
          tmp = build3_v (COND_EXPR, maskexpr, loopse.expr,
                          build_empty_stmt (input_location));
          gfc_add_expr_to_block (&loopse.pre, tmp);
          tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
                             count1, gfc_index_one_node);
          gfc_add_modify (&loopse.pre, count1, tmp);
        }
      else
        gfc_add_expr_to_block (&loopse.pre, loopse.expr);

      gfc_add_block_to_block (&block, &loopse.pre);
      gfc_add_block_to_block (&block, &loopse.post);

      /* Finish up the loop block and the loop.  */
      gfc_add_expr_to_block (&body, gfc_finish_block (&block));
      gfc_trans_scalarizing_loops (&loop, &body);
      gfc_add_block_to_block (&se.pre, &loop.pre);
      gfc_add_block_to_block (&se.pre, &loop.post);
      gfc_add_block_to_block (&se.pre, &se.post);
      gfc_cleanup_loop (&loop);
    }

  return gfc_finish_block (&se.pre);
}


/* Translate the RETURN statement.  */

tree
gfc_trans_return (gfc_code * code ATTRIBUTE_UNUSED)
{
  if (code->expr1)
    {
      gfc_se se;
      tree tmp;
      tree result;

      /* If code->expr is not NULL, this return statement must appear
         in a subroutine and current_fake_result_decl has already
         been generated.  */

      result = gfc_get_fake_result_decl (NULL, 0);
      if (!result)
        {
          gfc_warning ("An alternate return at %L without a * dummy argument",
                        &code->expr1->where);
          return build1_v (GOTO_EXPR, gfc_get_return_label ());
        }

      /* Start a new block for this statement.  */
      gfc_init_se (&se, NULL);
      gfc_start_block (&se.pre);

      gfc_conv_expr (&se, code->expr1);

      tmp = fold_build2 (MODIFY_EXPR, TREE_TYPE (result), result,
                         fold_convert (TREE_TYPE (result), se.expr));
      gfc_add_expr_to_block (&se.pre, tmp);

      tmp = build1_v (GOTO_EXPR, gfc_get_return_label ());
      gfc_add_expr_to_block (&se.pre, tmp);
      gfc_add_block_to_block (&se.pre, &se.post);
      return gfc_finish_block (&se.pre);
    }
  else
    return build1_v (GOTO_EXPR, gfc_get_return_label ());
}


/* Translate the PAUSE statement.  We have to translate this statement
   to a runtime library call.  */

tree
gfc_trans_pause (gfc_code * code)
{
  tree gfc_int4_type_node = gfc_get_int_type (4);
  gfc_se se;
  tree tmp;

  /* Start a new block for this statement.  */
  gfc_init_se (&se, NULL);
  gfc_start_block (&se.pre);


  if (code->expr1 == NULL)
    {
      tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
      tmp = build_call_expr_loc (input_location,
                             gfor_fndecl_pause_numeric, 1, tmp);
    }
  else
    {
      gfc_conv_expr_reference (&se, code->expr1);
      tmp = build_call_expr_loc (input_location,
                             gfor_fndecl_pause_string, 2,
                             se.expr, se.string_length);
    }

  gfc_add_expr_to_block (&se.pre, tmp);

  gfc_add_block_to_block (&se.pre, &se.post);

  return gfc_finish_block (&se.pre);
}
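
/* Worked example (illustrative only; the literal values are arbitrary):

       PAUSE 5          ! no character expression, so the numeric code held
                        !   in code->ext.stop_code is passed to the library
                        !   routine behind gfor_fndecl_pause_numeric.
       PAUSE 'ready'    ! a character expression, so its address and length
                        !   are passed to the routine behind
                        !   gfor_fndecl_pause_string.  */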


/* Translate the STOP statement.  We have to translate this statement
   to a runtime library call.  */

tree
gfc_trans_stop (gfc_code * code)
{
  tree gfc_int4_type_node = gfc_get_int_type (4);
  gfc_se se;
  tree tmp;

  /* Start a new block for this statement.  */
  gfc_init_se (&se, NULL);
  gfc_start_block (&se.pre);


  if (code->expr1 == NULL)
    {
      tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
      tmp = build_call_expr_loc (input_location,
                             gfor_fndecl_stop_numeric, 1, tmp);
    }
  else
    {
      gfc_conv_expr_reference (&se, code->expr1);
      tmp = build_call_expr_loc (input_location,
                             gfor_fndecl_stop_string, 2,
                             se.expr, se.string_length);
    }

  gfc_add_expr_to_block (&se.pre, tmp);

  gfc_add_block_to_block (&se.pre, &se.post);

  return gfc_finish_block (&se.pre);
}


/* Generate GENERIC for the IF construct. This function also deals with
   the simple IF statement, because the front end translates the IF
   statement into an IF construct.

   We translate:

        IF (cond) THEN
           then_clause
        ELSEIF (cond2)
           elseif_clause
        ELSE
           else_clause
        ENDIF

   into:

        pre_cond_s;
        if (cond_s)
          {
            then_clause;
          }
        else
          {
            pre_cond_s
            if (cond_s)
              {
                elseif_clause
              }
            else
              {
                else_clause;
              }
          }

   where COND_S is the simplified version of the predicate. PRE_COND_S
   are the pre side-effects produced by the translation of the
   conditional.
   We need to build the chain recursively otherwise we run into
   problems with folding incomplete statements.  */

static tree
gfc_trans_if_1 (gfc_code * code)
{
  gfc_se if_se;
  tree stmt, elsestmt;

  /* Check for an unconditional ELSE clause.  */
  if (!code->expr1)
    return gfc_trans_code (code->next);

  /* Initialize a statement builder for each block. Puts in NULL_TREEs.  */
  gfc_init_se (&if_se, NULL);
  gfc_start_block (&if_se.pre);

  /* Calculate the IF condition expression.  */
  gfc_conv_expr_val (&if_se, code->expr1);

  /* Translate the THEN clause.  */
  stmt = gfc_trans_code (code->next);

  /* Translate the ELSE clause.  */
  if (code->block)
    elsestmt = gfc_trans_if_1 (code->block);
  else
    elsestmt = build_empty_stmt (input_location);

  /* Build the condition expression and add it to the condition block.  */
  stmt = fold_build3 (COND_EXPR, void_type_node, if_se.expr, stmt, elsestmt);

  gfc_add_expr_to_block (&if_se.pre, stmt);

  /* Finish off this statement.  */
  return gfc_finish_block (&if_se.pre);
}

tree
gfc_trans_if (gfc_code * code)
{
  /* Ignore the top EXEC_IF, it only announces an IF construct. The
     actual code we must translate is in code->block.  */

  return gfc_trans_if_1 (code->block);
}


/* Translate an arithmetic IF expression.

   IF (cond) label1, label2, label3 translates to

    if (cond <= 0)
      {
        if (cond < 0)
          goto label1;
        else // cond == 0
          goto label2;
      }
    else // cond > 0
      goto label3;

   An optimized version can be generated in case of equal labels.
   E.g., if label1 is equal to label2, we can translate it to

    if (cond <= 0)
      goto label1;
    else
      goto label3;
*/

tree
gfc_trans_arithmetic_if (gfc_code * code)
{
  gfc_se se;
  tree tmp;
  tree branch1;
  tree branch2;
  tree zero;

  /* Start a new block.  */
  gfc_init_se (&se, NULL);
  gfc_start_block (&se.pre);

  /* Pre-evaluate COND.  */
  gfc_conv_expr_val (&se, code->expr1);
  se.expr = gfc_evaluate_now (se.expr, &se.pre);

  /* Build something to compare with.  */
  zero = gfc_build_const (TREE_TYPE (se.expr), integer_zero_node);

  if (code->label1->value != code->label2->value)
    {
      /* If (cond < 0) take branch1 else take branch2.
         First build jumps to the COND .LT. 0 and the COND .EQ. 0 cases.  */
      branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));
      branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label2));

      if (code->label1->value != code->label3->value)
        tmp = fold_build2 (LT_EXPR, boolean_type_node, se.expr, zero);
      else
        tmp = fold_build2 (NE_EXPR, boolean_type_node, se.expr, zero);

      branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
    }
  else
    branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));

  if (code->label1->value != code->label3->value
      && code->label2->value != code->label3->value)
    {
      /* if (cond <= 0) take branch1 else take branch2.  */
      branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label3));
      tmp = fold_build2 (LE_EXPR, boolean_type_node, se.expr, zero);
      branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
    }

  /* Append the COND_EXPR to the evaluation of COND, and return.  */
  gfc_add_expr_to_block (&se.pre, branch1);
  return gfc_finish_block (&se.pre);
}
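
/* Worked example (illustrative only; the expression and labels are
   arbitrary):

       IF (X - Y) 10, 20, 30

   branches to 10 when X - Y is negative, to 20 when it is zero, and to
   30 when it is positive.  When labels repeat, the code above folds the
   tests; e.g. IF (X) 10, 10, 30 needs only the single comparison
   x <= 0 to choose between label 10 and label 30.  */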


/* Translate a BLOCK construct.  This is basically what we would do for a
   procedure body.  */

tree
gfc_trans_block_construct (gfc_code* code)
{
  gfc_namespace* ns;
  gfc_symbol* sym;
  stmtblock_t body;
  tree tmp;

  ns = code->ext.ns;
  gcc_assert (ns);
  sym = ns->proc_name;
  gcc_assert (sym);

  gcc_assert (!sym->tlink);
  sym->tlink = sym;

  gfc_start_block (&body);
  gfc_process_block_locals (ns);

  tmp = gfc_trans_code (ns->code);
  tmp = gfc_trans_deferred_vars (sym, tmp);

  gfc_add_expr_to_block (&body, tmp);
  return gfc_finish_block (&body);
}
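
/* Worked example (illustrative only; the declarations are arbitrary).
   A Fortran 2008 BLOCK construct such as

       BLOCK
         INTEGER :: tmp
         tmp = i
         i = j
         j = tmp
       END BLOCK

   arrives here with its own namespace in code->ext.ns; its locals are
   laid out by gfc_process_block_locals, and any deferred initialization
   or cleanup is wrapped around the translated body by
   gfc_trans_deferred_vars, just as for a procedure body.  */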


/* Translate the simple DO construct.  This is where the loop variable has
   integer type and step +-1.  We can't use this in the general case
   because integer overflow and floating point errors could give incorrect
   results.
   We translate a do loop from:

   DO dovar = from, to, step
      body
   END DO

   to:

   [Evaluate loop bounds and step]
   dovar = from;
   if ((step > 0) ? (dovar <= to) : (dovar >= to))
    {
      for (;;)
        {
          body;
   cycle_label:
          cond = (dovar == to);
          dovar += step;
          if (cond) goto end_label;
        }
      }
   end_label:

   This helps the optimizers by avoiding the extra induction variable
   used in the general case.  */

static tree
gfc_trans_simple_do (gfc_code * code, stmtblock_t *pblock, tree dovar,
                     tree from, tree to, tree step, tree exit_cond)
{
  stmtblock_t body;
  tree type;
  tree cond;
  tree tmp;
  tree saved_dovar = NULL;
  tree cycle_label;
  tree exit_label;

  type = TREE_TYPE (dovar);

  /* Initialize the DO variable: dovar = from.  */
  gfc_add_modify (pblock, dovar, from);

  /* Save value for do-tinkering checking. */
  if (gfc_option.rtcheck & GFC_RTCHECK_DO)
    {
      saved_dovar = gfc_create_var (type, ".saved_dovar");
      gfc_add_modify (pblock, saved_dovar, dovar);
    }

  /* Cycle and exit statements are implemented with gotos.  */
  cycle_label = gfc_build_label_decl (NULL_TREE);
  exit_label = gfc_build_label_decl (NULL_TREE);

  /* Put the labels where they can be found later. See gfc_trans_do().  */
  code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);

  /* Loop body.  */
  gfc_start_block (&body);

  /* Main loop body.  */
  tmp = gfc_trans_code_cond (code->block->next, exit_cond);
  gfc_add_expr_to_block (&body, tmp);

  /* Label for cycle statements (if needed).  */
  if (TREE_USED (cycle_label))
    {
      tmp = build1_v (LABEL_EXPR, cycle_label);
      gfc_add_expr_to_block (&body, tmp);
    }

  /* Check whether someone has modified the loop variable. */
  if (gfc_option.rtcheck & GFC_RTCHECK_DO)
    {
      tmp = fold_build2 (NE_EXPR, boolean_type_node, dovar, saved_dovar);
      gfc_trans_runtime_check (true, false, tmp, &body, &code->loc,
                               "Loop variable has been modified");
    }

  /* Exit the loop if there is an I/O result condition or error.  */
  if (exit_cond)
    {
      tmp = build1_v (GOTO_EXPR, exit_label);
      tmp = fold_build3 (COND_EXPR, void_type_node, exit_cond, tmp,
                         build_empty_stmt (input_location));
      gfc_add_expr_to_block (&body, tmp);
    }

  /* Evaluate the loop condition.  */
  cond = fold_build2 (EQ_EXPR, boolean_type_node, dovar, to);
  cond = gfc_evaluate_now (cond, &body);

  /* Increment the loop variable.  */
  tmp = fold_build2 (PLUS_EXPR, type, dovar, step);
  gfc_add_modify (&body, dovar, tmp);

  if (gfc_option.rtcheck & GFC_RTCHECK_DO)
    gfc_add_modify (&body, saved_dovar, dovar);

  /* The loop exit.  */
  tmp = build1_v (GOTO_EXPR, exit_label);
  TREE_USED (exit_label) = 1;
  tmp = fold_build3 (COND_EXPR, void_type_node,
                     cond, tmp, build_empty_stmt (input_location));
  gfc_add_expr_to_block (&body, tmp);

  /* Finish the loop body.  */
  tmp = gfc_finish_block (&body);
  tmp = build1_v (LOOP_EXPR, tmp);

  /* Only execute the loop if the number of iterations is positive.  */
  if (tree_int_cst_sgn (step) > 0)
    cond = fold_build2 (LE_EXPR, boolean_type_node, dovar, to);
  else
    cond = fold_build2 (GE_EXPR, boolean_type_node, dovar, to);
  tmp = fold_build3 (COND_EXPR, void_type_node,
                     cond, tmp, build_empty_stmt (input_location));
  gfc_add_expr_to_block (pblock, tmp);

  /* Add the exit label.  */
  tmp = build1_v (LABEL_EXPR, exit_label);
  gfc_add_expr_to_block (pblock, tmp);

  return gfc_finish_block (pblock);
}

/* Translate the DO construct.  This obviously is one of the most
   important ones to get right with any compiler, but especially
   so for Fortran.

   We special case some loop forms as described in gfc_trans_simple_do.
   For other cases we implement them with a separate loop count,
   as described in the standard.

   We translate a do loop from:

   DO dovar = from, to, step
      body
   END DO

   to:

   [evaluate loop bounds and step]
   empty = (step > 0 ? to < from : to > from);
   countm1 = (to - from) / step;
   dovar = from;
   if (empty) goto exit_label;
   for (;;)
     {
       body;
cycle_label:
       dovar += step
       if (countm1 ==0) goto exit_label;
       countm1--;
     }
exit_label:

   countm1 is an unsigned integer.  It is equal to the loop count minus one,
   because the loop count itself can overflow.  */

tree
gfc_trans_do (gfc_code * code, tree exit_cond)
{
  gfc_se se;
  tree dovar;
  tree saved_dovar = NULL;
  tree from;
  tree to;
  tree step;
  tree countm1;
  tree type;
  tree utype;
  tree cond;
  tree cycle_label;
  tree exit_label;
  tree tmp;
  tree pos_step;
  stmtblock_t block;
  stmtblock_t body;

  gfc_start_block (&block);

  /* Evaluate all the expressions in the iterator.  */
  gfc_init_se (&se, NULL);
  gfc_conv_expr_lhs (&se, code->ext.iterator->var);
  gfc_add_block_to_block (&block, &se.pre);
  dovar = se.expr;
  type = TREE_TYPE (dovar);

  gfc_init_se (&se, NULL);
  gfc_conv_expr_val (&se, code->ext.iterator->start);
  gfc_add_block_to_block (&block, &se.pre);
  from = gfc_evaluate_now (se.expr, &block);

  gfc_init_se (&se, NULL);
  gfc_conv_expr_val (&se, code->ext.iterator->end);
  gfc_add_block_to_block (&block, &se.pre);
  to = gfc_evaluate_now (se.expr, &block);

  gfc_init_se (&se, NULL);
  gfc_conv_expr_val (&se, code->ext.iterator->step);
  gfc_add_block_to_block (&block, &se.pre);
  step = gfc_evaluate_now (se.expr, &block);

  if (gfc_option.rtcheck & GFC_RTCHECK_DO)
    {
      tmp = fold_build2 (EQ_EXPR, boolean_type_node, step,
                         fold_convert (type, integer_zero_node));
      gfc_trans_runtime_check (true, false, tmp, &block, &code->loc,
                               "DO step value is zero");
    }

  /* Special case simple loops.  */
  if (TREE_CODE (type) == INTEGER_TYPE
      && (integer_onep (step)
        || tree_int_cst_equal (step, integer_minus_one_node)))
    return gfc_trans_simple_do (code, &block, dovar, from, to, step, exit_cond);

  pos_step = fold_build2 (GT_EXPR, boolean_type_node, step,
                          fold_convert (type, integer_zero_node));

  if (TREE_CODE (type) == INTEGER_TYPE)
    utype = unsigned_type_for (type);
  else
    utype = unsigned_type_for (gfc_array_index_type);
  countm1 = gfc_create_var (utype, "countm1");

  /* Cycle and exit statements are implemented with gotos.  */
  cycle_label = gfc_build_label_decl (NULL_TREE);
  exit_label = gfc_build_label_decl (NULL_TREE);
  TREE_USED (exit_label) = 1;

  /* Initialize the DO variable: dovar = from.  */
  gfc_add_modify (&block, dovar, from);

  /* Save value for do-tinkering checking. */
  if (gfc_option.rtcheck & GFC_RTCHECK_DO)
    {
      saved_dovar = gfc_create_var (type, ".saved_dovar");
      gfc_add_modify (&block, saved_dovar, dovar);
    }

  /* Initialize loop count and jump to exit label if the loop is empty.
     This code is executed before we enter the loop body. We generate:
     step_sign = sign(1,step);
     if (step > 0)
       {
         if (to < from)
           goto exit_label;
       }
     else
       {
         if (to > from)
           goto exit_label;
       }
       countm1 = (to*step_sign - from*step_sign) / (step*step_sign);

  */

  if (TREE_CODE (type) == INTEGER_TYPE)
    {
      tree pos, neg, step_sign, to2, from2, step2;

      /* Calculate SIGN (1,step), as (step < 0 ? -1 : 1)  */

      tmp = fold_build2 (LT_EXPR, boolean_type_node, step,
                         build_int_cst (TREE_TYPE (step), 0));
      step_sign = fold_build3 (COND_EXPR, type, tmp,
                               build_int_cst (type, -1),
                               build_int_cst (type, 1));

      tmp = fold_build2 (LT_EXPR, boolean_type_node, to, from);
      pos = fold_build3 (COND_EXPR, void_type_node, tmp,
                         build1_v (GOTO_EXPR, exit_label),
                         build_empty_stmt (input_location));

      tmp = fold_build2 (GT_EXPR, boolean_type_node, to, from);
      neg = fold_build3 (COND_EXPR, void_type_node, tmp,
                         build1_v (GOTO_EXPR, exit_label),
                         build_empty_stmt (input_location));
      tmp = fold_build3 (COND_EXPR, void_type_node, pos_step, pos, neg);

      gfc_add_expr_to_block (&block, tmp);

      /* Calculate the loop count.  to-from can overflow, so
         we cast to unsigned.  */

      to2 = fold_build2 (MULT_EXPR, type, step_sign, to);
      from2 = fold_build2 (MULT_EXPR, type, step_sign, from);
      step2 = fold_build2 (MULT_EXPR, type, step_sign, step);
      step2 = fold_convert (utype, step2);
      tmp = fold_build2 (MINUS_EXPR, type, to2, from2);
      tmp = fold_convert (utype, tmp);
      tmp = fold_build2 (TRUNC_DIV_EXPR, utype, tmp, step2);
      tmp = fold_build2 (MODIFY_EXPR, void_type_node, countm1, tmp);
      gfc_add_expr_to_block (&block, tmp);
    }
  else
    {
      /* TODO: We could use the same width as the real type.
         This would probably cause more problems than it solves
         when we implement "long double" types.  */

      tmp = fold_build2 (MINUS_EXPR, type, to, from);
      tmp = fold_build2 (RDIV_EXPR, type, tmp, step);
      tmp = fold_build1 (FIX_TRUNC_EXPR, utype, tmp);
      gfc_add_modify (&block, countm1, tmp);

      /* We need a special check for empty loops:
         empty = (step > 0 ? to < from : to > from);  */
      tmp = fold_build3 (COND_EXPR, boolean_type_node, pos_step,
                         fold_build2 (LT_EXPR, boolean_type_node, to, from),
                         fold_build2 (GT_EXPR, boolean_type_node, to, from));
      /* If the loop is empty, go directly to the exit label.  */
      tmp = fold_build3 (COND_EXPR, void_type_node, tmp,
                         build1_v (GOTO_EXPR, exit_label),
                         build_empty_stmt (input_location));
      gfc_add_expr_to_block (&block, tmp);
    }

  /* Loop body.  */
  gfc_start_block (&body);

  /* Put these labels where they can be found later. We put the
     labels in a TREE_LIST node (because TREE_CHAIN is already
     used). cycle_label goes in TREE_PURPOSE (backend_decl), exit
     label in TREE_VALUE (backend_decl).  */

  code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);

  /* Main loop body.  */
  tmp = gfc_trans_code_cond (code->block->next, exit_cond);
  gfc_add_expr_to_block (&body, tmp);

  /* Label for cycle statements (if needed).  */
  if (TREE_USED (cycle_label))
    {
      tmp = build1_v (LABEL_EXPR, cycle_label);
      gfc_add_expr_to_block (&body, tmp);
    }

  /* Check whether someone has modified the loop variable. */
  if (gfc_option.rtcheck & GFC_RTCHECK_DO)
    {
      tmp = fold_build2 (NE_EXPR, boolean_type_node, dovar, saved_dovar);
      gfc_trans_runtime_check (true, false, tmp, &body, &code->loc,
                               "Loop variable has been modified");
    }

  /* Exit the loop if there is an I/O result condition or error.  */
  if (exit_cond)
    {
      tmp = build1_v (GOTO_EXPR, exit_label);
      tmp = fold_build3 (COND_EXPR, void_type_node, exit_cond, tmp,
                         build_empty_stmt (input_location));
      gfc_add_expr_to_block (&body, tmp);
    }

  /* Increment the loop variable.  */
  tmp = fold_build2 (PLUS_EXPR, type, dovar, step);
  gfc_add_modify (&body, dovar, tmp);

  if (gfc_option.rtcheck & GFC_RTCHECK_DO)
    gfc_add_modify (&body, saved_dovar, dovar);

  /* End with the loop condition.  Loop until countm1 == 0.  */
  cond = fold_build2 (EQ_EXPR, boolean_type_node, countm1,
                      build_int_cst (utype, 0));
  tmp = build1_v (GOTO_EXPR, exit_label);
  tmp = fold_build3 (COND_EXPR, void_type_node,
                     cond, tmp, build_empty_stmt (input_location));
  gfc_add_expr_to_block (&body, tmp);

  /* Decrement the loop count.  */
  tmp = fold_build2 (MINUS_EXPR, utype, countm1, build_int_cst (utype, 1));
  gfc_add_modify (&body, countm1, tmp);

  /* End of loop body.  */
  tmp = gfc_finish_block (&body);

  /* The for loop itself.  */
  tmp = build1_v (LOOP_EXPR, tmp);
  gfc_add_expr_to_block (&block, tmp);

  /* Add the exit label.  */
  tmp = build1_v (LABEL_EXPR, exit_label);
  gfc_add_expr_to_block (&block, tmp);

  return gfc_finish_block (&block);
}


/* Translate the DO WHILE construct.

   We translate

   DO WHILE (cond)
      body
   END DO

   to:

   for ( ; ; )
     {
       pre_cond;
       if (! cond) goto exit_label;
       body;
cycle_label:
     }
exit_label:

   Because the evaluation of the exit condition `cond' may have side
   effects, we can't do much for empty loop bodies.  The backend optimizers
   should be smart enough to eliminate any dead loops.  */

tree
gfc_trans_do_while (gfc_code * code)
{
  gfc_se cond;
  tree tmp;
  tree cycle_label;
  tree exit_label;
  stmtblock_t block;

  /* Everything we build here is part of the loop body.  */
  gfc_start_block (&block);

  /* Cycle and exit statements are implemented with gotos.  */
  cycle_label = gfc_build_label_decl (NULL_TREE);
  exit_label = gfc_build_label_decl (NULL_TREE);

  /* Put the labels where they can be found later. See gfc_trans_do().  */
  code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);

  /* Create a GIMPLE version of the exit condition.  */
  gfc_init_se (&cond, NULL);
  gfc_conv_expr_val (&cond, code->expr1);
  gfc_add_block_to_block (&block, &cond.pre);
  cond.expr = fold_build1 (TRUTH_NOT_EXPR, boolean_type_node, cond.expr);

  /* Build "IF (! cond) GOTO exit_label".  */
  tmp = build1_v (GOTO_EXPR, exit_label);
  TREE_USED (exit_label) = 1;
  tmp = fold_build3 (COND_EXPR, void_type_node,
                     cond.expr, tmp, build_empty_stmt (input_location));
  gfc_add_expr_to_block (&block, tmp);

  /* The main body of the loop.  */
  tmp = gfc_trans_code (code->block->next);
  gfc_add_expr_to_block (&block, tmp);

  /* Label for cycle statements (if needed).  */
  if (TREE_USED (cycle_label))
    {
      tmp = build1_v (LABEL_EXPR, cycle_label);
      gfc_add_expr_to_block (&block, tmp);
    }

  /* End of loop body.  */
  tmp = gfc_finish_block (&block);

  gfc_init_block (&block);
  /* Build the loop.  */
  tmp = build1_v (LOOP_EXPR, tmp);
  gfc_add_expr_to_block (&block, tmp);

  /* Add the exit label.  */
  tmp = build1_v (LABEL_EXPR, exit_label);
  gfc_add_expr_to_block (&block, tmp);

  return gfc_finish_block (&block);
}


/* Translate the SELECT CASE construct for INTEGER case expressions,
   without killing all potential optimizations.  The problem is that
   Fortran allows unbounded cases, but the back-end does not, so we
   need to intercept those before we enter the equivalent SWITCH_EXPR
   we can build.

   For example, we translate this,

   SELECT CASE (expr)
      CASE (:100,101,105:115)
         block_1
      CASE (190:199,200:)
         block_2
      CASE (300)
         block_3
      CASE DEFAULT
         block_4
   END SELECT

   to the GENERIC equivalent,

     switch (expr)
       {
         case (minimum value for typeof(expr)) ... 100:
         case 101:
         case 105 ... 115:
           block_1;
           goto end_label;

         case 200 ... (maximum value for typeof(expr)):
         case 190 ... 199:
           block_2;
           goto end_label;

         case 300:
           block_3;
           goto end_label;

         default:
           block_4;
           goto end_label;
       }

     end_label:  */

static tree
gfc_trans_integer_select (gfc_code * code)
{
  gfc_code *c;
  gfc_case *cp;
  tree end_label;
  tree tmp;
  gfc_se se;
  stmtblock_t block;
  stmtblock_t body;

  gfc_start_block (&block);

  /* Calculate the switch expression.  */
  gfc_init_se (&se, NULL);
  gfc_conv_expr_val (&se, code->expr1);
  gfc_add_block_to_block (&block, &se.pre);

  end_label = gfc_build_label_decl (NULL_TREE);

  gfc_init_block (&body);

  for (c = code->block; c; c = c->block)
    {
      for (cp = c->ext.case_list; cp; cp = cp->next)
        {
          tree low, high;
          tree label;

          /* Assume it's the default case.  */
          low = high = NULL_TREE;

          if (cp->low)
            {
              low = gfc_conv_mpz_to_tree (cp->low->value.integer,
                                          cp->low->ts.kind);

              /* If there's only a lower bound, set the high bound to the
                 maximum value of the case expression.  */
              if (!cp->high)
                high = TYPE_MAX_VALUE (TREE_TYPE (se.expr));
            }

          if (cp->high)
            {
              /* Three cases are possible here:

                 1) There is no lower bound, e.g. CASE (:N).
                 2) There is a lower bound .NE. high bound, that is
                    a case range, e.g. CASE (N:M) where M>N (we make
                    sure that M>N during type resolution).
                 3) There is a lower bound, and it has the same value
                    as the high bound, e.g. CASE (N:N).  This is our
                    internal representation of CASE(N).

                 In the first and second case, we need to set a value for
                 high.  In the third case, we don't because the GCC middle
                 end represents a single case value by just letting high be
                 a NULL_TREE.  We can't do that because we need to be able
                 to represent unbounded cases.  */

              if (!cp->low
                  || (cp->low
                      && mpz_cmp (cp->low->value.integer,
                                  cp->high->value.integer) != 0))
                high = gfc_conv_mpz_to_tree (cp->high->value.integer,
                                             cp->high->ts.kind);

              /* Unbounded case.  */
              if (!cp->low)
                low = TYPE_MIN_VALUE (TREE_TYPE (se.expr));
            }

          /* Build a label.  */
          label = gfc_build_label_decl (NULL_TREE);

          /* Add this case label.
             Add parameter 'label', make it match GCC backend.  */
          tmp = fold_build3 (CASE_LABEL_EXPR, void_type_node,
                             low, high, label);
          gfc_add_expr_to_block (&body, tmp);
        }

      /* Add the statements for this case.  */
      tmp = gfc_trans_code (c->next);
      gfc_add_expr_to_block (&body, tmp);

      /* Break to the end of the construct.  */
      tmp = build1_v (GOTO_EXPR, end_label);
      gfc_add_expr_to_block (&body, tmp);
    }

  tmp = gfc_finish_block (&body);
  tmp = build3_v (SWITCH_EXPR, se.expr, tmp, NULL_TREE);
  gfc_add_expr_to_block (&block, tmp);

  tmp = build1_v (LABEL_EXPR, end_label);
  gfc_add_expr_to_block (&block, tmp);

  return gfc_finish_block (&block);
}


/* Translate the SELECT CASE construct for LOGICAL case expressions.

   There are only two cases possible here, even though the standard
   does allow three cases in a LOGICAL SELECT CASE construct: .TRUE.,
   .FALSE., and DEFAULT.

   We never generate more than two blocks here.  Instead, we always
   try to eliminate the DEFAULT case.  This way, we can translate this
   kind of SELECT construct to a simple

   if {} else {};

   expression in GENERIC.  */

static tree
gfc_trans_logical_select (gfc_code * code)
{
  gfc_code *c;
  gfc_code *t, *f, *d;
  gfc_case *cp;
  gfc_se se;
  stmtblock_t block;

  /* Assume we don't have any cases at all.  */
  t = f = d = NULL;

  /* Now see which ones we actually do have.  We can have at most two
     cases in a single case list: one for .TRUE. and one for .FALSE.
     The default case is always separate.  If the cases for .TRUE. and
     .FALSE. are in the same case list, the block for that case list
     is always executed, and we don't generate a COND_EXPR.  */
  for (c = code->block; c; c = c->block)
    {
      for (cp = c->ext.case_list; cp; cp = cp->next)
        {
          if (cp->low)
            {
              if (cp->low->value.logical == 0) /* .FALSE.  */
                f = c;
              else /* if (cp->value.logical != 0), thus .TRUE.  */
                t = c;
            }
          else
            d = c;
        }
    }

  /* Start a new block.  */
  gfc_start_block (&block);

  /* Calculate the switch expression.  We always need to do this
     because it may have side effects.  */
  gfc_init_se (&se, NULL);
  gfc_conv_expr_val (&se, code->expr1);
  gfc_add_block_to_block (&block, &se.pre);

  if (t == f && t != NULL)
    {
      /* Cases for .TRUE. and .FALSE. are in the same block.  Just
         translate the code for these cases, append it to the current
         block.  */
      gfc_add_expr_to_block (&block, gfc_trans_code (t->next));
    }
  else
    {
      tree true_tree, false_tree, stmt;

      true_tree = build_empty_stmt (input_location);
      false_tree = build_empty_stmt (input_location);

      /* If we have a case for .TRUE. and for .FALSE., discard the default case.
          Otherwise, if .TRUE. or .FALSE. is missing and there is a default case,
          make the missing case the default case.  */
      if (t != NULL && f != NULL)
        d = NULL;
      else if (d != NULL)
        {
          if (t == NULL)
            t = d;
          else
            f = d;
        }

      /* Translate the code for each of these blocks, and append it to
         the current block.  */
      if (t != NULL)
        true_tree = gfc_trans_code (t->next);

      if (f != NULL)
        false_tree = gfc_trans_code (f->next);

      stmt = fold_build3 (COND_EXPR, void_type_node, se.expr,
                          true_tree, false_tree);
      gfc_add_expr_to_block (&block, stmt);
    }

  return gfc_finish_block (&block);
}
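
/* Worked example (illustrative only; the procedure names are arbitrary):

       SELECT CASE (flag)
       CASE (.TRUE.)
          CALL on_true
       CASE DEFAULT
          CALL otherwise
       END SELECT

   Here the .FALSE. case is missing, so the DEFAULT block is treated as
   the .FALSE. case and the whole construct becomes a single COND_EXPR
   on the (possibly side-effecting) selector expression.  */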
1526
 
1527
 
1528
/* Translate the SELECT CASE construct for CHARACTER case expressions.
1529
   Instead of generating compares and jumps, it is far simpler to
1530
   generate a data structure describing the cases in order and call a
1531
   library subroutine that locates the right case.
1532
   This is particularly true because this is the only case where we
1533
   might have to dispose of a temporary.
1534
   The library subroutine returns a pointer to jump to or NULL if no
1535
   branches are to be taken.  */
1536
 
1537
static tree
1538
gfc_trans_character_select (gfc_code *code)
1539
{
1540
  tree init, node, end_label, tmp, type, case_num, label, fndecl;
1541
  stmtblock_t block, body;
1542
  gfc_case *cp, *d;
1543
  gfc_code *c;
1544
  gfc_se se;
1545
  int n, k;
1546
 
1547
  /* The jump table types are stored in static variables to avoid
1548
     constructing them from scratch every single time.  */
1549
  static tree select_struct[2];
1550
  static tree ss_string1[2], ss_string1_len[2];
1551
  static tree ss_string2[2], ss_string2_len[2];
1552
  static tree ss_target[2];
1553
 
1554
  tree pchartype = gfc_get_pchar_type (code->expr1->ts.kind);
1555
 
1556
  if (code->expr1->ts.kind == 1)
1557
    k = 0;
1558
  else if (code->expr1->ts.kind == 4)
1559
    k = 1;
1560
  else
1561
    gcc_unreachable ();
1562
 
1563
  if (select_struct[k] == NULL)
1564
    {
1565
      select_struct[k] = make_node (RECORD_TYPE);
1566
 
1567
      if (code->expr1->ts.kind == 1)
1568
        TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char1");
1569
      else if (code->expr1->ts.kind == 4)
1570
        TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char4");
1571
      else
1572
        gcc_unreachable ();
1573
 
1574
#undef ADD_FIELD
1575
#define ADD_FIELD(NAME, TYPE)                                   \
1576
  ss_##NAME[k] = gfc_add_field_to_struct                                \
1577
     (&(TYPE_FIELDS (select_struct[k])), select_struct[k],      \
1578
      get_identifier (stringize(NAME)), TYPE)
1579
 
1580
      ADD_FIELD (string1, pchartype);
1581
      ADD_FIELD (string1_len, gfc_charlen_type_node);
1582
 
1583
      ADD_FIELD (string2, pchartype);
1584
      ADD_FIELD (string2_len, gfc_charlen_type_node);
1585
 
1586
      ADD_FIELD (target, integer_type_node);
1587
#undef ADD_FIELD
1588
 
1589
      gfc_finish_type (select_struct[k]);
1590
    }
1591
 
1592
  cp = code->block->ext.case_list;
1593
  while (cp->left != NULL)
1594
    cp = cp->left;
1595
 
1596
  n = 0;
1597
  for (d = cp; d; d = d->right)
1598
    d->n = n++;
1599
 
1600
  end_label = gfc_build_label_decl (NULL_TREE);
1601
 
1602
  /* Generate the body */
1603
  gfc_start_block (&block);
1604
  gfc_init_block (&body);
1605
 
1606
  for (c = code->block; c; c = c->block)
1607
    {
1608
      for (d = c->ext.case_list; d; d = d->next)
1609
        {
1610
          label = gfc_build_label_decl (NULL_TREE);
1611
          tmp = fold_build3 (CASE_LABEL_EXPR, void_type_node,
1612
                             build_int_cst (NULL_TREE, d->n),
1613
                             build_int_cst (NULL_TREE, d->n), label);
1614
          gfc_add_expr_to_block (&body, tmp);
1615
        }
1616
 
1617
      tmp = gfc_trans_code (c->next);
1618
      gfc_add_expr_to_block (&body, tmp);
1619
 
1620
      tmp = build1_v (GOTO_EXPR, end_label);
1621
      gfc_add_expr_to_block (&body, tmp);
1622
    }
1623
 
1624
  /* Generate the structure describing the branches.  */
1625
  init = NULL_TREE;
1626
 
1627
  for (d = cp; d; d = d->right)
1628
    {
1629
      node = NULL_TREE;
1630
 
1631
      gfc_init_se (&se, NULL);
1632
 
1633
      if (d->low == NULL)
1634
        {
1635
          node = tree_cons (ss_string1[k], null_pointer_node, node);
1636
          node = tree_cons (ss_string1_len[k], integer_zero_node, node);
1637
        }
1638
      else
1639
        {
1640
          gfc_conv_expr_reference (&se, d->low);
1641
 
1642
          node = tree_cons (ss_string1[k], se.expr, node);
1643
          node = tree_cons (ss_string1_len[k], se.string_length, node);
1644
        }
1645
 
1646
      if (d->high == NULL)
1647
        {
1648
          node = tree_cons (ss_string2[k], null_pointer_node, node);
1649
          node = tree_cons (ss_string2_len[k], integer_zero_node, node);
1650
        }
1651
      else
1652
        {
1653
          gfc_init_se (&se, NULL);
1654
          gfc_conv_expr_reference (&se, d->high);
1655
 
1656
          node = tree_cons (ss_string2[k], se.expr, node);
1657
          node = tree_cons (ss_string2_len[k], se.string_length, node);
1658
        }
1659
 
1660
      node = tree_cons (ss_target[k], build_int_cst (integer_type_node, d->n),
1661
                        node);
1662
 
1663
      tmp = build_constructor_from_list (select_struct[k], nreverse (node));
1664
      init = tree_cons (NULL_TREE, tmp, init);
1665
    }
1666
 
1667
  type = build_array_type (select_struct[k],
1668
                           build_index_type (build_int_cst (NULL_TREE, n-1)));
1669
 
1670
  init = build_constructor_from_list (type, nreverse(init));
1671
  TREE_CONSTANT (init) = 1;
1672
  TREE_STATIC (init) = 1;
1673
  /* Create a static variable to hold the jump table.  */
1674
  tmp = gfc_create_var (type, "jumptable");
1675
  TREE_CONSTANT (tmp) = 1;
1676
  TREE_STATIC (tmp) = 1;
1677
  TREE_READONLY (tmp) = 1;
1678
  DECL_INITIAL (tmp) = init;
1679
  init = tmp;
1680
 
1681
  /* Build the library call.  */
1682
  init = gfc_build_addr_expr (pvoid_type_node, init);
1683
 
1684
  gfc_init_se (&se, NULL);
1685
  gfc_conv_expr_reference (&se, code->expr1);
1686
 
1687
  gfc_add_block_to_block (&block, &se.pre);
1688
 
1689
  if (code->expr1->ts.kind == 1)
1690
    fndecl = gfor_fndecl_select_string;
1691
  else if (code->expr1->ts.kind == 4)
1692
    fndecl = gfor_fndecl_select_string_char4;
1693
  else
1694
    gcc_unreachable ();
1695
 
1696
  tmp = build_call_expr_loc (input_location,
1697
                         fndecl, 4, init, build_int_cst (NULL_TREE, n),
1698
                         se.expr, se.string_length);
1699
  case_num = gfc_create_var (integer_type_node, "case_num");
1700
  gfc_add_modify (&block, case_num, tmp);
1701
 
1702
  gfc_add_block_to_block (&block, &se.post);
1703
 
1704
  tmp = gfc_finish_block (&body);
1705
  tmp = build3_v (SWITCH_EXPR, case_num, tmp, NULL_TREE);
1706
  gfc_add_expr_to_block (&block, tmp);
1707
 
1708
  tmp = build1_v (LABEL_EXPR, end_label);
1709
  gfc_add_expr_to_block (&block, tmp);
1710
 
1711
  return gfc_finish_block (&block);
1712
}
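
/* A minimal standalone sketch, not part of trans-stmt.c, illustrating the
   jump table entry and lookup that gfc_trans_character_select above relies
   on.  The names jump_entry_sketch and select_case_sketch are invented for
   illustration; the real table is an array of select_struct[k] records and
   the lookup is done by the libgfortran routine bound to
   gfor_fndecl_select_string.  For simplicity this sketch uses
   NUL-terminated strings and a linear scan instead of Fortran's explicit
   lengths and blank-padding comparison rules.  */

#include <string.h>

typedef struct
{
  const char *string1;          /* lower bound of the case range, or NULL  */
  const char *string2;          /* upper bound of the case range, or NULL  */
  int target;                   /* case number fed to the SWITCH_EXPR  */
}
jump_entry_sketch;

static int
select_case_sketch (const jump_entry_sketch *table, int n, const char *sel)
{
  int i;

  for (i = 0; i < n; i++)
    {
      int lo_ok = table[i].string1 == NULL
                  || strcmp (table[i].string1, sel) <= 0;
      int hi_ok = table[i].string2 == NULL
                  || strcmp (sel, table[i].string2) <= 0;

      if (lo_ok && hi_ok)
        return table[i].target; /* branch to this case  */
    }
  return -1;                    /* no case matched: no branch is taken  */
}
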
1713
 
1714
 
1715
/* Translate the three variants of the SELECT CASE construct.
1716
 
1717
   SELECT CASEs with INTEGER case expressions can be translated to an
1718
   equivalent GENERIC switch statement, and for LOGICAL case
1719
   expressions we build one or two if-else compares.
1720
 
1721
   SELECT CASEs with CHARACTER case expressions are a whole different
1722
   story, because they don't exist in GENERIC.  So we sort them and
1723
   do a binary search at runtime.
1724
 
1725
   Fortran has no BREAK statement, and it does not allow jumps from
1726
   one case block to another.  That makes things a lot easier for
1727
   the optimizers.  */
1728
 
1729
tree
1730
gfc_trans_select (gfc_code * code)
1731
{
1732
  gcc_assert (code && code->expr1);
1733
 
1734
  /* Empty SELECT constructs are legal.  */
1735
  if (code->block == NULL)
1736
    return build_empty_stmt (input_location);
1737
 
1738
  /* Select the correct translation function.  */
1739
  switch (code->expr1->ts.type)
1740
    {
1741
    case BT_LOGICAL:    return gfc_trans_logical_select (code);
1742
    case BT_INTEGER:    return gfc_trans_integer_select (code);
1743
    case BT_CHARACTER:  return gfc_trans_character_select (code);
1744
    default:
1745
      gfc_internal_error ("gfc_trans_select(): Bad type for case expr.");
1746
      /* Not reached */
1747
    }
1748
}
1749
 
1750
 
1751
/* Traversal function to substitute a replacement symtree if the symbol
1752
   in the expression is the same as that passed.  f == 2 signals that
1753
   that variable itself is not to be checked - only the references.
1754
   This group of functions is used when the variable expression in a
1755
   FORALL assignment has internal references.  For example:
1756
                FORALL (i = 1:4) p(p(i)) = i
1757
   The only recourse here is to store a copy of 'p' for the index
1758
   expression.  */
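
/* A minimal standalone sketch, not part of trans-stmt.c, of why the copy is
   needed, using a C analogue of FORALL (i = 1:4) p(p(i)) = i: the index
   values must be read from a snapshot of p taken before any element of p is
   overwritten.  The names below are illustrative, and the sketch assumes
   n <= 64 and that p holds valid 0-based indices.  */

#include <string.h>

static void
forall_self_reference_sketch (int *p, int n)
{
  int saved[64];                /* plays the role of the temporary copy  */
  int i;

  memcpy (saved, p, (size_t) n * sizeof *p);
  for (i = 0; i < n; i++)
    p[saved[i]] = i;            /* p(p(i)) = i, indexing via the snapshot  */
}
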
1759
 
1760
static gfc_symtree *new_symtree;
1761
static gfc_symtree *old_symtree;
1762
 
1763
static bool
1764
forall_replace (gfc_expr *expr, gfc_symbol *sym, int *f)
1765
{
1766
  if (expr->expr_type != EXPR_VARIABLE)
1767
    return false;
1768
 
1769
  if (*f == 2)
1770
    *f = 1;
1771
  else if (expr->symtree->n.sym == sym)
1772
    expr->symtree = new_symtree;
1773
 
1774
  return false;
1775
}
1776
 
1777
static void
1778
forall_replace_symtree (gfc_expr *e, gfc_symbol *sym, int f)
1779
{
1780
  gfc_traverse_expr (e, sym, forall_replace, f);
1781
}
1782
 
1783
static bool
1784
forall_restore (gfc_expr *expr,
1785
                gfc_symbol *sym ATTRIBUTE_UNUSED,
1786
                int *f ATTRIBUTE_UNUSED)
1787
{
1788
  if (expr->expr_type != EXPR_VARIABLE)
1789
    return false;
1790
 
1791
  if (expr->symtree == new_symtree)
1792
    expr->symtree = old_symtree;
1793
 
1794
  return false;
1795
}
1796
 
1797
static void
1798
forall_restore_symtree (gfc_expr *e)
1799
{
1800
  gfc_traverse_expr (e, NULL, forall_restore, 0);
1801
}
1802
 
1803
static void
1804
forall_make_variable_temp (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
1805
{
1806
  gfc_se tse;
1807
  gfc_se rse;
1808
  gfc_expr *e;
1809
  gfc_symbol *new_sym;
1810
  gfc_symbol *old_sym;
1811
  gfc_symtree *root;
1812
  tree tmp;
1813
 
1814
  /* Build a copy of the lvalue.  */
1815
  old_symtree = c->expr1->symtree;
1816
  old_sym = old_symtree->n.sym;
1817
  e = gfc_lval_expr_from_sym (old_sym);
1818
  if (old_sym->attr.dimension)
1819
    {
1820
      gfc_init_se (&tse, NULL);
1821
      gfc_conv_subref_array_arg (&tse, e, 0, INTENT_IN, false);
1822
      gfc_add_block_to_block (pre, &tse.pre);
1823
      gfc_add_block_to_block (post, &tse.post);
1824
      tse.expr = build_fold_indirect_ref_loc (input_location, tse.expr);
1825
 
1826
      if (e->ts.type != BT_CHARACTER)
1827
        {
1828
          /* Use the variable offset for the temporary.  */
1829
          tmp = gfc_conv_array_offset (old_sym->backend_decl);
1830
          gfc_conv_descriptor_offset_set (pre, tse.expr, tmp);
1831
        }
1832
    }
1833
  else
1834
    {
1835
      gfc_init_se (&tse, NULL);
1836
      gfc_init_se (&rse, NULL);
1837
      gfc_conv_expr (&rse, e);
1838
      if (e->ts.type == BT_CHARACTER)
1839
        {
1840
          tse.string_length = rse.string_length;
1841
          tmp = gfc_get_character_type_len (gfc_default_character_kind,
1842
                                            tse.string_length);
1843
          tse.expr = gfc_conv_string_tmp (&tse, build_pointer_type (tmp),
1844
                                          rse.string_length);
1845
          gfc_add_block_to_block (pre, &tse.pre);
1846
          gfc_add_block_to_block (post, &tse.post);
1847
        }
1848
      else
1849
        {
1850
          tmp = gfc_typenode_for_spec (&e->ts);
1851
          tse.expr = gfc_create_var (tmp, "temp");
1852
        }
1853
 
1854
      tmp = gfc_trans_scalar_assign (&tse, &rse, e->ts, true,
1855
                                     e->expr_type == EXPR_VARIABLE);
1856
      gfc_add_expr_to_block (pre, tmp);
1857
    }
1858
  gfc_free_expr (e);
1859
 
1860
  /* Create a new symbol to represent the lvalue.  */
1861
  new_sym = gfc_new_symbol (old_sym->name, NULL);
1862
  new_sym->ts = old_sym->ts;
1863
  new_sym->attr.referenced = 1;
1864
  new_sym->attr.temporary = 1;
1865
  new_sym->attr.dimension = old_sym->attr.dimension;
1866
  new_sym->attr.flavor = old_sym->attr.flavor;
1867
 
1868
  /* Use the temporary as the backend_decl.  */
1869
  new_sym->backend_decl = tse.expr;
1870
 
1871
  /* Create a fake symtree for it.  */
1872
  root = NULL;
1873
  new_symtree = gfc_new_symtree (&root, old_sym->name);
1874
  new_symtree->n.sym = new_sym;
1875
  gcc_assert (new_symtree == root);
1876
 
1877
  /* Go through the expression reference replacing the old_symtree
1878
     with the new.  */
1879
  forall_replace_symtree (c->expr1, old_sym, 2);
1880
 
1881
  /* Now that we have made this temporary, we might as well use it for
1882
  the right hand side.  */
1883
  forall_replace_symtree (c->expr2, old_sym, 1);
1884
}
1885
 
1886
 
1887
/* Handles dependencies in forall assignments.  */
1888
static int
1889
check_forall_dependencies (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
1890
{
1891
  gfc_ref *lref;
1892
  gfc_ref *rref;
1893
  int need_temp;
1894
  gfc_symbol *lsym;
1895
 
1896
  lsym = c->expr1->symtree->n.sym;
1897
  need_temp = gfc_check_dependency (c->expr1, c->expr2, 0);
1898
 
1899
  /* Now check for dependencies within the 'variable'
1900
     expression itself.  These are treated by making a complete
1901
     copy of variable and changing all the references to it
1902
     point to the copy instead.  Note that the shallow copy of
1903
     the variable will not suffice for derived types with
1904
     pointer components.  We therefore leave these to their
1905
     own devices.  */
1906
  if (lsym->ts.type == BT_DERIVED
1907
        && lsym->ts.u.derived->attr.pointer_comp)
1908
    return need_temp;
1909
 
1910
  new_symtree = NULL;
1911
  if (find_forall_index (c->expr1, lsym, 2) == SUCCESS)
1912
    {
1913
      forall_make_variable_temp (c, pre, post);
1914
      need_temp = 0;
1915
    }
1916
 
1917
  /* Substrings with dependencies are treated in the same
1918
     way.  */
1919
  if (c->expr1->ts.type == BT_CHARACTER
1920
        && c->expr1->ref
1921
        && c->expr2->expr_type == EXPR_VARIABLE
1922
        && lsym == c->expr2->symtree->n.sym)
1923
    {
1924
      for (lref = c->expr1->ref; lref; lref = lref->next)
1925
        if (lref->type == REF_SUBSTRING)
1926
          break;
1927
      for (rref = c->expr2->ref; rref; rref = rref->next)
1928
        if (rref->type == REF_SUBSTRING)
1929
          break;
1930
 
1931
      if (rref && lref
1932
            && gfc_dep_compare_expr (rref->u.ss.start, lref->u.ss.start) < 0)
1933
        {
1934
          forall_make_variable_temp (c, pre, post);
1935
          need_temp = 0;
1936
        }
1937
    }
1938
  return need_temp;
1939
}
1940
 
1941
 
1942
static void
1943
cleanup_forall_symtrees (gfc_code *c)
1944
{
1945
  forall_restore_symtree (c->expr1);
1946
  forall_restore_symtree (c->expr2);
1947
  gfc_free (new_symtree->n.sym);
1948
  gfc_free (new_symtree);
1949
}
1950
 
1951
 
1952
/* Generate the loops for a FORALL block, specified by FORALL_TMP.  BODY
1953
   is the contents of the FORALL block/stmt to be iterated.  MASK_FLAG
1954
   indicates whether we should generate code to test the FORALL's mask
1955
   array.  OUTER is the loop header to be used for initializing mask
1956
   indices.
1957
 
1958
   The generated loop format is:
1959
    count = (end - start + step) / step
1960
    loopvar = start
1961
    while (1)
1962
      {
1963
        if (count <= 0)
1964
          goto end_of_loop
1965
        <body>
1966
        loopvar += step
1967
        count --
1968
      }
1969
    end_of_loop:  */
1970
 
1971
static tree
1972
gfc_trans_forall_loop (forall_info *forall_tmp, tree body,
1973
                       int mask_flag, stmtblock_t *outer)
1974
{
1975
  int n, nvar;
1976
  tree tmp;
1977
  tree cond;
1978
  stmtblock_t block;
1979
  tree exit_label;
1980
  tree count;
1981
  tree var, start, end, step;
1982
  iter_info *iter;
1983
 
1984
  /* Initialize the mask index outside the FORALL nest.  */
1985
  if (mask_flag && forall_tmp->mask)
1986
    gfc_add_modify (outer, forall_tmp->maskindex, gfc_index_zero_node);
1987
 
1988
  iter = forall_tmp->this_loop;
1989
  nvar = forall_tmp->nvar;
1990
  for (n = 0; n < nvar; n++)
1991
    {
1992
      var = iter->var;
1993
      start = iter->start;
1994
      end = iter->end;
1995
      step = iter->step;
1996
 
1997
      exit_label = gfc_build_label_decl (NULL_TREE);
1998
      TREE_USED (exit_label) = 1;
1999
 
2000
      /* The loop counter.  */
2001
      count = gfc_create_var (TREE_TYPE (var), "count");
2002
 
2003
      /* The body of the loop.  */
2004
      gfc_init_block (&block);
2005
 
2006
      /* The exit condition.  */
2007
      cond = fold_build2 (LE_EXPR, boolean_type_node,
2008
                          count, build_int_cst (TREE_TYPE (count), 0));
2009
      tmp = build1_v (GOTO_EXPR, exit_label);
2010
      tmp = fold_build3 (COND_EXPR, void_type_node,
2011
                         cond, tmp, build_empty_stmt (input_location));
2012
      gfc_add_expr_to_block (&block, tmp);
2013
 
2014
      /* The main loop body.  */
2015
      gfc_add_expr_to_block (&block, body);
2016
 
2017
      /* Increment the loop variable.  */
2018
      tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (var), var, step);
2019
      gfc_add_modify (&block, var, tmp);
2020
 
2021
      /* Advance to the next mask element.  Only do this for the
2022
         innermost loop.  */
2023
      if (n == 0 && mask_flag && forall_tmp->mask)
2024
        {
2025
          tree maskindex = forall_tmp->maskindex;
2026
          tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2027
                             maskindex, gfc_index_one_node);
2028
          gfc_add_modify (&block, maskindex, tmp);
2029
        }
2030
 
2031
      /* Decrement the loop counter.  */
2032
      tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (var), count,
2033
                         build_int_cst (TREE_TYPE (var), 1));
2034
      gfc_add_modify (&block, count, tmp);
2035
 
2036
      body = gfc_finish_block (&block);
2037
 
2038
      /* Loop var initialization.  */
2039
      gfc_init_block (&block);
2040
      gfc_add_modify (&block, var, start);
2041
 
2042
 
2043
      /* Initialize the loop counter.  */
2044
      tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (var), step, start);
2045
      tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (var), end, tmp);
2046
      tmp = fold_build2 (TRUNC_DIV_EXPR, TREE_TYPE (var), tmp, step);
2047
      gfc_add_modify (&block, count, tmp);
2048
 
2049
      /* The loop expression.  */
2050
      tmp = build1_v (LOOP_EXPR, body);
2051
      gfc_add_expr_to_block (&block, tmp);
2052
 
2053
      /* The exit label.  */
2054
      tmp = build1_v (LABEL_EXPR, exit_label);
2055
      gfc_add_expr_to_block (&block, tmp);
2056
 
2057
      body = gfc_finish_block (&block);
2058
      iter = iter->next;
2059
    }
2060
  return body;
2061
}
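
/* A minimal standalone sketch, not part of trans-stmt.c, of the loop shape
   documented before gfc_trans_forall_loop above, for a single FORALL index.
   forall_loop_sketch and its parameters are illustrative names only.  */

static void
forall_loop_sketch (int start, int end, int step, void (*body) (int loopvar))
{
  int count = (end - start + step) / step;      /* trip count, as above  */
  int loopvar = start;

  while (1)
    {
      if (count <= 0)
        break;                                  /* "goto end_of_loop"  */
      body (loopvar);
      loopvar += step;
      count--;
    }
}
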
2062
 
2063
 
2064
/* Generate the body and loops according to MASK_FLAG.  If MASK_FLAG
2065
   is nonzero, the body is controlled by all masks in the forall nest.
2066
   Otherwise, the innermost loop is not controlled by its mask.  This
2067
   is used for initializing that mask.  */
2068
 
2069
static tree
2070
gfc_trans_nested_forall_loop (forall_info * nested_forall_info, tree body,
2071
                              int mask_flag)
2072
{
2073
  tree tmp;
2074
  stmtblock_t header;
2075
  forall_info *forall_tmp;
2076
  tree mask, maskindex;
2077
 
2078
  gfc_start_block (&header);
2079
 
2080
  forall_tmp = nested_forall_info;
2081
  while (forall_tmp != NULL)
2082
    {
2083
      /* Generate body with masks' control.  */
2084
      if (mask_flag)
2085
        {
2086
          mask = forall_tmp->mask;
2087
          maskindex = forall_tmp->maskindex;
2088
 
2089
          /* If a mask was specified make the assignment conditional.  */
2090
          if (mask)
2091
            {
2092
              tmp = gfc_build_array_ref (mask, maskindex, NULL);
2093
              body = build3_v (COND_EXPR, tmp, body,
2094
                               build_empty_stmt (input_location));
2095
            }
2096
        }
2097
      body = gfc_trans_forall_loop (forall_tmp, body, mask_flag, &header);
2098
      forall_tmp = forall_tmp->prev_nest;
2099
      mask_flag = 1;
2100
    }
2101
 
2102
  gfc_add_expr_to_block (&header, body);
2103
  return gfc_finish_block (&header);
2104
}
2105
 
2106
 
2107
/* Allocate data for holding a temporary array.  Returns either a local
2108
   temporary array or a pointer variable.  */
2109
 
2110
static tree
2111
gfc_do_allocate (tree bytesize, tree size, tree * pdata, stmtblock_t * pblock,
2112
                 tree elem_type)
2113
{
2114
  tree tmpvar;
2115
  tree type;
2116
  tree tmp;
2117
 
2118
  if (INTEGER_CST_P (size))
2119
    {
2120
      tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
2121
                         gfc_index_one_node);
2122
    }
2123
  else
2124
    tmp = NULL_TREE;
2125
 
2126
  type = build_range_type (gfc_array_index_type, gfc_index_zero_node, tmp);
2127
  type = build_array_type (elem_type, type);
2128
  if (gfc_can_put_var_on_stack (bytesize))
2129
    {
2130
      gcc_assert (INTEGER_CST_P (size));
2131
      tmpvar = gfc_create_var (type, "temp");
2132
      *pdata = NULL_TREE;
2133
    }
2134
  else
2135
    {
2136
      tmpvar = gfc_create_var (build_pointer_type (type), "temp");
2137
      *pdata = convert (pvoid_type_node, tmpvar);
2138
 
2139
      tmp = gfc_call_malloc (pblock, TREE_TYPE (tmpvar), bytesize);
2140
      gfc_add_modify (pblock, tmpvar, tmp);
2141
    }
2142
  return tmpvar;
2143
}
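
/* A minimal standalone sketch, not part of trans-stmt.c, of the decision
   gfc_do_allocate above makes: a temporary whose byte size is a small
   compile-time constant stays on the stack and leaves *PDATA as NULL,
   anything else is heap allocated and must later be freed through *PDATA.
   do_allocate_sketch and its parameters are illustrative names only.  */

#include <stdlib.h>

static void *
do_allocate_sketch (size_t bytesize, int size_is_constant,
                    void *stack_buf, size_t stack_buf_size, void **pdata)
{
  if (size_is_constant && bytesize <= stack_buf_size)
    {
      *pdata = NULL;            /* nothing for the caller to free  */
      return stack_buf;         /* use the caller's stack temporary  */
    }
  *pdata = malloc (bytesize);   /* heap temporary, freed via *pdata  */
  return *pdata;
}
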
2144
 
2145
 
2146
/* Generate code to copy the temporary to the actual lhs.  */
2147
 
2148
static tree
2149
generate_loop_for_temp_to_lhs (gfc_expr *expr, tree tmp1, tree count3,
2150
                               tree count1, tree wheremask, bool invert)
2151
{
2152
  gfc_ss *lss;
2153
  gfc_se lse, rse;
2154
  stmtblock_t block, body;
2155
  gfc_loopinfo loop1;
2156
  tree tmp;
2157
  tree wheremaskexpr;
2158
 
2159
  /* Walk the lhs.  */
2160
  lss = gfc_walk_expr (expr);
2161
 
2162
  if (lss == gfc_ss_terminator)
2163
    {
2164
      gfc_start_block (&block);
2165
 
2166
      gfc_init_se (&lse, NULL);
2167
 
2168
      /* Translate the expression.  */
2169
      gfc_conv_expr (&lse, expr);
2170
 
2171
      /* Form the expression for the temporary.  */
2172
      tmp = gfc_build_array_ref (tmp1, count1, NULL);
2173
 
2174
      /* Use the scalar assignment as is.  */
2175
      gfc_add_block_to_block (&block, &lse.pre);
2176
      gfc_add_modify (&block, lse.expr, tmp);
2177
      gfc_add_block_to_block (&block, &lse.post);
2178
 
2179
      /* Increment count1.  */
2180
      tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
2181
                         gfc_index_one_node);
2182
      gfc_add_modify (&block, count1, tmp);
2183
 
2184
      tmp = gfc_finish_block (&block);
2185
    }
2186
  else
2187
    {
2188
      gfc_start_block (&block);
2189
 
2190
      gfc_init_loopinfo (&loop1);
2191
      gfc_init_se (&rse, NULL);
2192
      gfc_init_se (&lse, NULL);
2193
 
2194
      /* Associate the lss with the loop.  */
2195
      gfc_add_ss_to_loop (&loop1, lss);
2196
 
2197
      /* Calculate the bounds of the scalarization.  */
2198
      gfc_conv_ss_startstride (&loop1);
2199
      /* Setup the scalarizing loops.  */
2200
      gfc_conv_loop_setup (&loop1, &expr->where);
2201
 
2202
      gfc_mark_ss_chain_used (lss, 1);
2203
 
2204
      /* Start the scalarized loop body.  */
2205
      gfc_start_scalarized_body (&loop1, &body);
2206
 
2207
      /* Setup the gfc_se structures.  */
2208
      gfc_copy_loopinfo_to_se (&lse, &loop1);
2209
      lse.ss = lss;
2210
 
2211
      /* Form the expression of the temporary.  */
2212
      if (lss != gfc_ss_terminator)
2213
        rse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2214
      /* Translate expr.  */
2215
      gfc_conv_expr (&lse, expr);
2216
 
2217
      /* Use the scalar assignment.  */
2218
      rse.string_length = lse.string_length;
2219
      tmp = gfc_trans_scalar_assign (&lse, &rse, expr->ts, false, false);
2220
 
2221
      /* Form the mask expression according to the mask tree list.  */
2222
      if (wheremask)
2223
        {
2224
          wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
2225
          if (invert)
2226
            wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
2227
                                         TREE_TYPE (wheremaskexpr),
2228
                                         wheremaskexpr);
2229
          tmp = fold_build3 (COND_EXPR, void_type_node,
2230
                             wheremaskexpr, tmp,
2231
                             build_empty_stmt (input_location));
2232
       }
2233
 
2234
      gfc_add_expr_to_block (&body, tmp);
2235
 
2236
      /* Increment count1.  */
2237
      tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2238
                         count1, gfc_index_one_node);
2239
      gfc_add_modify (&body, count1, tmp);
2240
 
2241
      /* Increment count3.  */
2242
      if (count3)
2243
        {
2244
          tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2245
                             count3, gfc_index_one_node);
2246
          gfc_add_modify (&body, count3, tmp);
2247
        }
2248
 
2249
      /* Generate the copying loops.  */
2250
      gfc_trans_scalarizing_loops (&loop1, &body);
2251
      gfc_add_block_to_block (&block, &loop1.pre);
2252
      gfc_add_block_to_block (&block, &loop1.post);
2253
      gfc_cleanup_loop (&loop1);
2254
 
2255
      tmp = gfc_finish_block (&block);
2256
    }
2257
  return tmp;
2258
}
2259
 
2260
 
2261
/* Generate code to copy the rhs to the temporary.  TMP1 is the address of
2262
   the temporary; LSS and RSS are formed in function compute_inner_temp_size (),
2263
   and should not be freed.  WHEREMASK is the conditional execution mask
2264
   whose sense may be inverted by INVERT.  */
2265
 
2266
static tree
2267
generate_loop_for_rhs_to_temp (gfc_expr *expr2, tree tmp1, tree count3,
2268
                               tree count1, gfc_ss *lss, gfc_ss *rss,
2269
                               tree wheremask, bool invert)
2270
{
2271
  stmtblock_t block, body1;
2272
  gfc_loopinfo loop;
2273
  gfc_se lse;
2274
  gfc_se rse;
2275
  tree tmp;
2276
  tree wheremaskexpr;
2277
 
2278
  gfc_start_block (&block);
2279
 
2280
  gfc_init_se (&rse, NULL);
2281
  gfc_init_se (&lse, NULL);
2282
 
2283
  if (lss == gfc_ss_terminator)
2284
    {
2285
      gfc_init_block (&body1);
2286
      gfc_conv_expr (&rse, expr2);
2287
      lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2288
    }
2289
  else
2290
    {
2291
      /* Initialize the loop.  */
2292
      gfc_init_loopinfo (&loop);
2293
 
2294
      /* We may need LSS to determine the shape of the expression.  */
2295
      gfc_add_ss_to_loop (&loop, lss);
2296
      gfc_add_ss_to_loop (&loop, rss);
2297
 
2298
      gfc_conv_ss_startstride (&loop);
2299
      gfc_conv_loop_setup (&loop, &expr2->where);
2300
 
2301
      gfc_mark_ss_chain_used (rss, 1);
2302
      /* Start the loop body.  */
2303
      gfc_start_scalarized_body (&loop, &body1);
2304
 
2305
      /* Translate the expression.  */
2306
      gfc_copy_loopinfo_to_se (&rse, &loop);
2307
      rse.ss = rss;
2308
      gfc_conv_expr (&rse, expr2);
2309
 
2310
      /* Form the expression of the temporary.  */
2311
      lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2312
    }
2313
 
2314
  /* Use the scalar assignment.  */
2315
  lse.string_length = rse.string_length;
2316
  tmp = gfc_trans_scalar_assign (&lse, &rse, expr2->ts, true,
2317
                                 expr2->expr_type == EXPR_VARIABLE);
2318
 
2319
  /* Form the mask expression according to the mask tree list.  */
2320
  if (wheremask)
2321
    {
2322
      wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
2323
      if (invert)
2324
        wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
2325
                                     TREE_TYPE (wheremaskexpr),
2326
                                     wheremaskexpr);
2327
      tmp = fold_build3 (COND_EXPR, void_type_node,
2328
                         wheremaskexpr, tmp, build_empty_stmt (input_location));
2329
    }
2330
 
2331
  gfc_add_expr_to_block (&body1, tmp);
2332
 
2333
  if (lss == gfc_ss_terminator)
2334
    {
2335
      gfc_add_block_to_block (&block, &body1);
2336
 
2337
      /* Increment count1.  */
2338
      tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
2339
                         gfc_index_one_node);
2340
      gfc_add_modify (&block, count1, tmp);
2341
    }
2342
  else
2343
    {
2344
      /* Increment count1.  */
2345
      tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2346
                         count1, gfc_index_one_node);
2347
      gfc_add_modify (&body1, count1, tmp);
2348
 
2349
      /* Increment count3.  */
2350
      if (count3)
2351
        {
2352
          tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2353
                             count3, gfc_index_one_node);
2354
          gfc_add_modify (&body1, count3, tmp);
2355
        }
2356
 
2357
      /* Generate the copying loops.  */
2358
      gfc_trans_scalarizing_loops (&loop, &body1);
2359
 
2360
      gfc_add_block_to_block (&block, &loop.pre);
2361
      gfc_add_block_to_block (&block, &loop.post);
2362
 
2363
      gfc_cleanup_loop (&loop);
2364
      /* TODO: Reuse lss and rss when copying temp->lhs.  Need to be careful
2365
         as tree nodes in SS may not be valid in different scope.  */
2366
    }
2367
 
2368
  tmp = gfc_finish_block (&block);
2369
  return tmp;
2370
}
2371
 
2372
 
2373
/* Calculate the size of temporary needed in the assignment inside forall.
2374
   LSS and RSS are filled in this function.  */
2375
 
2376
static tree
2377
compute_inner_temp_size (gfc_expr *expr1, gfc_expr *expr2,
2378
                         stmtblock_t * pblock,
2379
                         gfc_ss **lss, gfc_ss **rss)
2380
{
2381
  gfc_loopinfo loop;
2382
  tree size;
2383
  int i;
2384
  int save_flag;
2385
  tree tmp;
2386
 
2387
  *lss = gfc_walk_expr (expr1);
2388
  *rss = NULL;
2389
 
2390
  size = gfc_index_one_node;
2391
  if (*lss != gfc_ss_terminator)
2392
    {
2393
      gfc_init_loopinfo (&loop);
2394
 
2395
      /* Walk the RHS of the expression.  */
2396
      *rss = gfc_walk_expr (expr2);
2397
      if (*rss == gfc_ss_terminator)
2398
        {
2399
          /* The rhs is scalar.  Add a ss for the expression.  */
2400
          *rss = gfc_get_ss ();
2401
          (*rss)->next = gfc_ss_terminator;
2402
          (*rss)->type = GFC_SS_SCALAR;
2403
          (*rss)->expr = expr2;
2404
        }
2405
 
2406
      /* Associate the SS with the loop.  */
2407
      gfc_add_ss_to_loop (&loop, *lss);
2408
      /* We don't actually need to add the rhs at this point, but it might
2409
         make guessing the loop bounds a bit easier.  */
2410
      gfc_add_ss_to_loop (&loop, *rss);
2411
 
2412
      /* We only want the shape of the expression, not the rest of the junk
2413
         generated by the scalarizer.  */
2414
      loop.array_parameter = 1;
2415
 
2416
      /* Calculate the bounds of the scalarization.  */
2417
      save_flag = gfc_option.rtcheck;
2418
      gfc_option.rtcheck &= !GFC_RTCHECK_BOUNDS;
2419
      gfc_conv_ss_startstride (&loop);
2420
      gfc_option.rtcheck = save_flag;
2421
      gfc_conv_loop_setup (&loop, &expr2->where);
2422
 
2423
      /* Figure out how many elements we need.  */
2424
      for (i = 0; i < loop.dimen; i++)
2425
        {
2426
          tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type,
2427
                             gfc_index_one_node, loop.from[i]);
2428
          tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2429
                             tmp, loop.to[i]);
2430
          size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
2431
        }
2432
      gfc_add_block_to_block (pblock, &loop.pre);
2433
      size = gfc_evaluate_now (size, pblock);
2434
      gfc_add_block_to_block (pblock, &loop.post);
2435
 
2436
      /* TODO: write a function that cleans up a loopinfo without freeing
2437
         the SS chains.  Currently a NOP.  */
2438
    }
2439
 
2440
  return size;
2441
}
2442
 
2443
 
2444
/* Calculate the overall iteration count of the nested forall construct.
2445
   This routine actually calculates the number of times the body of the
2446
   nested forall specified by NESTED_FORALL_INFO is executed and multiplies
2447
   that by the expression INNER_SIZE.  The BLOCK argument specifies the
2448
   block in which to calculate the result, and the optional INNER_SIZE_BODY
2449
   argument contains any statements that need to be executed (inside the loop)
2450
   to initialize or calculate INNER_SIZE.  */
2451
 
2452
static tree
2453
compute_overall_iter_number (forall_info *nested_forall_info, tree inner_size,
2454
                             stmtblock_t *inner_size_body, stmtblock_t *block)
2455
{
2456
  forall_info *forall_tmp = nested_forall_info;
2457
  tree tmp, number;
2458
  stmtblock_t body;
2459
 
2460
  /* We can eliminate the innermost unconditional loops with constant
2461
     array bounds.  */
2462
  if (INTEGER_CST_P (inner_size))
2463
    {
2464
      while (forall_tmp
2465
             && !forall_tmp->mask
2466
             && INTEGER_CST_P (forall_tmp->size))
2467
        {
2468
          inner_size = fold_build2 (MULT_EXPR, gfc_array_index_type,
2469
                                    inner_size, forall_tmp->size);
2470
          forall_tmp = forall_tmp->prev_nest;
2471
        }
2472
 
2473
      /* If there are no loops left, we have our constant result.  */
2474
      if (!forall_tmp)
2475
        return inner_size;
2476
    }
2477
 
2478
  /* Otherwise, create a temporary variable to compute the result.  */
2479
  number = gfc_create_var (gfc_array_index_type, "num");
2480
  gfc_add_modify (block, number, gfc_index_zero_node);
2481
 
2482
  gfc_start_block (&body);
2483
  if (inner_size_body)
2484
    gfc_add_block_to_block (&body, inner_size_body);
2485
  if (forall_tmp)
2486
    tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2487
                       number, inner_size);
2488
  else
2489
    tmp = inner_size;
2490
  gfc_add_modify (&body, number, tmp);
2491
  tmp = gfc_finish_block (&body);
2492
 
2493
  /* Generate loops.  */
2494
  if (forall_tmp != NULL)
2495
    tmp = gfc_trans_nested_forall_loop (forall_tmp, tmp, 1);
2496
 
2497
  gfc_add_expr_to_block (block, tmp);
2498
 
2499
  return number;
2500
}
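
/* A minimal standalone sketch, not part of trans-stmt.c, of the computation
   performed by compute_overall_iter_number above in the fully constant case:
   the total is INNER_SIZE multiplied by the trip count of every enclosing
   FORALL level.  nest_sketch stands in for forall_info; a level with a mask
   or a non-constant size (signalled here by returning -1) needs the runtime
   "num" counter built by the real routine instead.  */

typedef struct nest_sketch
{
  long size;                    /* trip count of this level, -1 if unknown  */
  int has_mask;
  struct nest_sketch *prev;     /* enclosing FORALL level  */
}
nest_sketch;

static long
overall_iter_number_sketch (const nest_sketch *nest, long inner_size)
{
  long total = inner_size;
  const nest_sketch *level;

  for (level = nest; level; level = level->prev)
    {
      if (level->has_mask || level->size < 0)
        return -1;              /* fall back to the runtime "num" counter  */
      total *= level->size;
    }
  return total;
}
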
2501
 
2502
 
2503
/* Allocate a temporary for the forall construct.  SIZE is the size of the
2504
   temporary needed.  PTEMP1 is returned so that the space can be freed later.  */
2505
 
2506
static tree
2507
allocate_temp_for_forall_nest_1 (tree type, tree size, stmtblock_t * block,
2508
                                 tree * ptemp1)
2509
{
2510
  tree bytesize;
2511
  tree unit;
2512
  tree tmp;
2513
 
2514
  unit = fold_convert (gfc_array_index_type, TYPE_SIZE_UNIT (type));
2515
  if (!integer_onep (unit))
2516
    bytesize = fold_build2 (MULT_EXPR, gfc_array_index_type, size, unit);
2517
  else
2518
    bytesize = size;
2519
 
2520
  *ptemp1 = NULL;
2521
  tmp = gfc_do_allocate (bytesize, size, ptemp1, block, type);
2522
 
2523
  if (*ptemp1)
2524
    tmp = build_fold_indirect_ref_loc (input_location, tmp);
2525
  return tmp;
2526
}
2527
 
2528
 
2529
/* Allocate temporary for forall construct according to the information in
2530
   nested_forall_info.  INNER_SIZE is the size of temporary needed in the
2531
   assignment inside forall.  PTEMP1 is returned so that the space can be freed later.  */
2532
 
2533
static tree
2534
allocate_temp_for_forall_nest (forall_info * nested_forall_info, tree type,
2535
                               tree inner_size, stmtblock_t * inner_size_body,
2536
                               stmtblock_t * block, tree * ptemp1)
2537
{
2538
  tree size;
2539
 
2540
  /* Calculate the total size of temporary needed in forall construct.  */
2541
  size = compute_overall_iter_number (nested_forall_info, inner_size,
2542
                                      inner_size_body, block);
2543
 
2544
  return allocate_temp_for_forall_nest_1 (type, size, block, ptemp1);
2545
}
2546
 
2547
 
2548
/* Handle assignments inside forall which need a temporary.
2549
 
2550
    forall (i=start:end:stride; maskexpr)
2551
      e<i> = f<i>
2552
    end forall
2553
   (where e,f<i> are arbitrary expressions possibly involving i
2554
    and there is a dependency between e<i> and f<i>)
2555
   Translates to:
2556
    masktmp(:) = maskexpr(:)
2557
 
2558
    maskindex = 0;
2559
    count1 = 0;
2560
    num = 0;
2561
    for (i = start; i <= end; i += stride)
2562
      num += SIZE (f<i>)
2563
    count1 = 0;
2564
    ALLOCATE (tmp(num))
2565
    for (i = start; i <= end; i += stride)
2566
      {
2567
        if (masktmp[maskindex++])
2568
          tmp[count1++] = f<i>
2569
      }
2570
    maskindex = 0;
2571
    count1 = 0;
2572
    for (i = start; i <= end; i += stride)
2573
      {
2574
        if (masktmp[maskindex++])
2575
          e<i> = tmp[count1++]
2576
      }
2577
    DEALLOCATE (tmp)
2578
  */
2579
static void
2580
gfc_trans_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2581
                            tree wheremask, bool invert,
2582
                            forall_info * nested_forall_info,
2583
                            stmtblock_t * block)
2584
{
2585
  tree type;
2586
  tree inner_size;
2587
  gfc_ss *lss, *rss;
2588
  tree count, count1;
2589
  tree tmp, tmp1;
2590
  tree ptemp1;
2591
  stmtblock_t inner_size_body;
2592
 
2593
  /* Create vars. count1 is the current iterator number of the nested
2594
     forall.  */
2595
  count1 = gfc_create_var (gfc_array_index_type, "count1");
2596
 
2597
  /* Count is the wheremask index.  */
2598
  if (wheremask)
2599
    {
2600
      count = gfc_create_var (gfc_array_index_type, "count");
2601
      gfc_add_modify (block, count, gfc_index_zero_node);
2602
    }
2603
  else
2604
    count = NULL;
2605
 
2606
  /* Initialize count1.  */
2607
  gfc_add_modify (block, count1, gfc_index_zero_node);
2608
 
2609
  /* Calculate the size of temporary needed in the assignment. Return loop, lss
2610
     and rss which are used in function generate_loop_for_rhs_to_temp().  */
2611
  gfc_init_block (&inner_size_body);
2612
  inner_size = compute_inner_temp_size (expr1, expr2, &inner_size_body,
2613
                                        &lss, &rss);
2614
 
2615
  /* The type of the LHS.  Used in function allocate_temp_for_forall_nest.  */
2616
  if (expr1->ts.type == BT_CHARACTER && expr1->ts.u.cl->length)
2617
    {
2618
      if (!expr1->ts.u.cl->backend_decl)
2619
        {
2620
          gfc_se tse;
2621
          gfc_init_se (&tse, NULL);
2622
          gfc_conv_expr (&tse, expr1->ts.u.cl->length);
2623
          expr1->ts.u.cl->backend_decl = tse.expr;
2624
        }
2625
      type = gfc_get_character_type_len (gfc_default_character_kind,
2626
                                         expr1->ts.u.cl->backend_decl);
2627
    }
2628
  else
2629
    type = gfc_typenode_for_spec (&expr1->ts);
2630
 
2631
  /* Allocate temporary for nested forall construct according to the
2632
     information in nested_forall_info and inner_size.  */
2633
  tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type, inner_size,
2634
                                        &inner_size_body, block, &ptemp1);
2635
 
2636
  /* Generate code to copy the rhs to the temporary.  */
2637
  tmp = generate_loop_for_rhs_to_temp (expr2, tmp1, count, count1, lss, rss,
2638
                                       wheremask, invert);
2639
 
2640
  /* Generate body and loops according to the information in
2641
     nested_forall_info.  */
2642
  tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2643
  gfc_add_expr_to_block (block, tmp);
2644
 
2645
  /* Reset count1.  */
2646
  gfc_add_modify (block, count1, gfc_index_zero_node);
2647
 
2648
  /* Reset count.  */
2649
  if (wheremask)
2650
    gfc_add_modify (block, count, gfc_index_zero_node);
2651
 
2652
  /* Generate code to copy the temporary to the lhs.  */
2653
  tmp = generate_loop_for_temp_to_lhs (expr1, tmp1, count, count1,
2654
                                       wheremask, invert);
2655
 
2656
  /* Generate body and loops according to the information in
2657
     nested_forall_info.  */
2658
  tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2659
  gfc_add_expr_to_block (block, tmp);
2660
 
2661
  if (ptemp1)
2662
    {
2663
      /* Free the temporary.  */
2664
      tmp = gfc_call_free (ptemp1);
2665
      gfc_add_expr_to_block (block, tmp);
2666
    }
2667
}
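
/* A minimal standalone sketch, not part of trans-stmt.c, of the two-pass
   scheme implemented by gfc_trans_assign_need_temp above, reduced to a
   one-dimensional masked assignment a(i) = f(i): every right-hand side is
   evaluated into a linear temporary first, then copied to the left-hand
   side, so a dependency between the two sides cannot corrupt the result.
   The names are illustrative; the temporary is sized for all iterations,
   matching the "num" computed in the pseudo-code above.  */

#include <stdlib.h>

static void
masked_assign_via_temp_sketch (double *a, const unsigned char *masktmp,
                               int n, double (*f) (int i))
{
  double *tmp = malloc ((size_t) n * sizeof *tmp);
  int count1 = 0;
  int i;

  if (!tmp)
    return;

  for (i = 0; i < n; i++)       /* first pass: rhs -> temporary  */
    if (masktmp[i])
      tmp[count1++] = f (i);

  count1 = 0;
  for (i = 0; i < n; i++)       /* second pass: temporary -> lhs  */
    if (masktmp[i])
      a[i] = tmp[count1++];

  free (tmp);
}
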
2668
 
2669
 
2670
/* Translate a pointer assignment inside FORALL which needs a temporary.  */
2671
 
2672
static void
2673
gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2674
                                    forall_info * nested_forall_info,
2675
                                    stmtblock_t * block)
2676
{
2677
  tree type;
2678
  tree inner_size;
2679
  gfc_ss *lss, *rss;
2680
  gfc_se lse;
2681
  gfc_se rse;
2682
  gfc_ss_info *info;
2683
  gfc_loopinfo loop;
2684
  tree desc;
2685
  tree parm;
2686
  tree parmtype;
2687
  stmtblock_t body;
2688
  tree count;
2689
  tree tmp, tmp1, ptemp1;
2690
 
2691
  count = gfc_create_var (gfc_array_index_type, "count");
2692
  gfc_add_modify (block, count, gfc_index_zero_node);
2693
 
2694
  inner_size = integer_one_node;
2695
  lss = gfc_walk_expr (expr1);
2696
  rss = gfc_walk_expr (expr2);
2697
  if (lss == gfc_ss_terminator)
2698
    {
2699
      type = gfc_typenode_for_spec (&expr1->ts);
2700
      type = build_pointer_type (type);
2701
 
2702
      /* Allocate temporary for nested forall construct according to the
2703
         information in nested_forall_info and inner_size.  */
2704
      tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type,
2705
                                            inner_size, NULL, block, &ptemp1);
2706
      gfc_start_block (&body);
2707
      gfc_init_se (&lse, NULL);
2708
      lse.expr = gfc_build_array_ref (tmp1, count, NULL);
2709
      gfc_init_se (&rse, NULL);
2710
      rse.want_pointer = 1;
2711
      gfc_conv_expr (&rse, expr2);
2712
      gfc_add_block_to_block (&body, &rse.pre);
2713
      gfc_add_modify (&body, lse.expr,
2714
                           fold_convert (TREE_TYPE (lse.expr), rse.expr));
2715
      gfc_add_block_to_block (&body, &rse.post);
2716
 
2717
      /* Increment count.  */
2718
      tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2719
                         count, gfc_index_one_node);
2720
      gfc_add_modify (&body, count, tmp);
2721
 
2722
      tmp = gfc_finish_block (&body);
2723
 
2724
      /* Generate body and loops according to the information in
2725
         nested_forall_info.  */
2726
      tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2727
      gfc_add_expr_to_block (block, tmp);
2728
 
2729
      /* Reset count.  */
2730
      gfc_add_modify (block, count, gfc_index_zero_node);
2731
 
2732
      gfc_start_block (&body);
2733
      gfc_init_se (&lse, NULL);
2734
      gfc_init_se (&rse, NULL);
2735
      rse.expr = gfc_build_array_ref (tmp1, count, NULL);
2736
      lse.want_pointer = 1;
2737
      gfc_conv_expr (&lse, expr1);
2738
      gfc_add_block_to_block (&body, &lse.pre);
2739
      gfc_add_modify (&body, lse.expr, rse.expr);
2740
      gfc_add_block_to_block (&body, &lse.post);
2741
      /* Increment count.  */
2742
      tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2743
                         count, gfc_index_one_node);
2744
      gfc_add_modify (&body, count, tmp);
2745
      tmp = gfc_finish_block (&body);
2746
 
2747
      /* Generate body and loops according to the information in
2748
         nested_forall_info.  */
2749
      tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2750
      gfc_add_expr_to_block (block, tmp);
2751
    }
2752
  else
2753
    {
2754
      gfc_init_loopinfo (&loop);
2755
 
2756
      /* Associate the SS with the loop.  */
2757
      gfc_add_ss_to_loop (&loop, rss);
2758
 
2759
      /* Setup the scalarizing loops and bounds.  */
2760
      gfc_conv_ss_startstride (&loop);
2761
 
2762
      gfc_conv_loop_setup (&loop, &expr2->where);
2763
 
2764
      info = &rss->data.info;
2765
      desc = info->descriptor;
2766
 
2767
      /* Make a new descriptor.  */
2768
      parmtype = gfc_get_element_type (TREE_TYPE (desc));
2769
      parmtype = gfc_get_array_type_bounds (parmtype, loop.dimen,
2770
                                            loop.from, loop.to, 1,
2771
                                            GFC_ARRAY_UNKNOWN, true);
2772
 
2773
      /* Allocate temporary for nested forall construct.  */
2774
      tmp1 = allocate_temp_for_forall_nest (nested_forall_info, parmtype,
2775
                                            inner_size, NULL, block, &ptemp1);
2776
      gfc_start_block (&body);
2777
      gfc_init_se (&lse, NULL);
2778
      lse.expr = gfc_build_array_ref (tmp1, count, NULL);
2779
      lse.direct_byref = 1;
2780
      rss = gfc_walk_expr (expr2);
2781
      gfc_conv_expr_descriptor (&lse, expr2, rss);
2782
 
2783
      gfc_add_block_to_block (&body, &lse.pre);
2784
      gfc_add_block_to_block (&body, &lse.post);
2785
 
2786
      /* Increment count.  */
2787
      tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2788
                         count, gfc_index_one_node);
2789
      gfc_add_modify (&body, count, tmp);
2790
 
2791
      tmp = gfc_finish_block (&body);
2792
 
2793
      /* Generate body and loops according to the information in
2794
         nested_forall_info.  */
2795
      tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2796
      gfc_add_expr_to_block (block, tmp);
2797
 
2798
      /* Reset count.  */
2799
      gfc_add_modify (block, count, gfc_index_zero_node);
2800
 
2801
      parm = gfc_build_array_ref (tmp1, count, NULL);
2802
      lss = gfc_walk_expr (expr1);
2803
      gfc_init_se (&lse, NULL);
2804
      gfc_conv_expr_descriptor (&lse, expr1, lss);
2805
      gfc_add_modify (&lse.pre, lse.expr, parm);
2806
      gfc_start_block (&body);
2807
      gfc_add_block_to_block (&body, &lse.pre);
2808
      gfc_add_block_to_block (&body, &lse.post);
2809
 
2810
      /* Increment count.  */
2811
      tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2812
                         count, gfc_index_one_node);
2813
      gfc_add_modify (&body, count, tmp);
2814
 
2815
      tmp = gfc_finish_block (&body);
2816
 
2817
      tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2818
      gfc_add_expr_to_block (block, tmp);
2819
    }
2820
  /* Free the temporary.  */
2821
  if (ptemp1)
2822
    {
2823
      tmp = gfc_call_free (ptemp1);
2824
      gfc_add_expr_to_block (block, tmp);
2825
    }
2826
}
2827
 
2828
 
2829
/* FORALL and WHERE statements are really nasty, especially when you nest
2830
   them.  All the rhs expressions of a forall assignment must be evaluated before the
2831
   actual assignments are performed. Presumably this also applies to all the
2832
   assignments in an inner where statement.  */
2833
 
2834
/* Generate code for a FORALL statement.  Any temporaries are allocated as a
2835
   linear array, relying on the fact that we process in the same order in all
2836
   loops.
2837
 
2838
    forall (i=start:end:stride; maskexpr)
2839
      e<i> = f<i>
2840
      g<i> = h<i>
2841
    end forall
2842
   (where e,f,g,h<i> are arbitrary expressions possibly involving i)
2843
   Translates to:
2844
    count = ((end + 1 - start) / stride)
2845
    masktmp(:) = maskexpr(:)
2846
 
2847
    maskindex = 0;
2848
    for (i = start; i <= end; i += stride)
2849
      {
2850
        if (masktmp[maskindex++])
2851
          e<i> = f<i>
2852
      }
2853
    maskindex = 0;
2854
    for (i = start; i <= end; i += stride)
2855
      {
2856
        if (masktmp[maskindex++])
2857
          g<i> = h<i>
2858
      }
2859
 
2860
    Note that this code only works when there are no dependencies.
2861
    Forall loops with array assignments and data dependencies are a real pain,
2862
    because the size of the temporary cannot always be determined before the
2863
    loop is executed.  This problem is compounded by the presence of nested
2864
    FORALL constructs.
2865
 */
2866
 
2867
static tree
2868
gfc_trans_forall_1 (gfc_code * code, forall_info * nested_forall_info)
2869
{
2870
  stmtblock_t pre;
2871
  stmtblock_t post;
2872
  stmtblock_t block;
2873
  stmtblock_t body;
2874
  tree *var;
2875
  tree *start;
2876
  tree *end;
2877
  tree *step;
2878
  gfc_expr **varexpr;
2879
  tree tmp;
2880
  tree assign;
2881
  tree size;
2882
  tree maskindex;
2883
  tree mask;
2884
  tree pmask;
2885
  int n;
2886
  int nvar;
2887
  int need_temp;
2888
  gfc_forall_iterator *fa;
2889
  gfc_se se;
2890
  gfc_code *c;
2891
  gfc_saved_var *saved_vars;
2892
  iter_info *this_forall;
2893
  forall_info *info;
2894
  bool need_mask;
2895
 
2896
  /* Do nothing if the mask is false.  */
2897
  if (code->expr1
2898
      && code->expr1->expr_type == EXPR_CONSTANT
2899
      && !code->expr1->value.logical)
2900
    return build_empty_stmt (input_location);
2901
 
2902
  n = 0;
2903
  /* Count the number of FORALL indices.  */
2904
  for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2905
    n++;
2906
  nvar = n;
2907
 
2908
  /* Allocate the space for var, start, end, step, varexpr.  */
2909
  var = (tree *) gfc_getmem (nvar * sizeof (tree));
2910
  start = (tree *) gfc_getmem (nvar * sizeof (tree));
2911
  end = (tree *) gfc_getmem (nvar * sizeof (tree));
2912
  step = (tree *) gfc_getmem (nvar * sizeof (tree));
2913
  varexpr = (gfc_expr **) gfc_getmem (nvar * sizeof (gfc_expr *));
2914
  saved_vars = (gfc_saved_var *) gfc_getmem (nvar * sizeof (gfc_saved_var));
2915
 
2916
  /* Allocate the space for info.  */
2917
  info = (forall_info *) gfc_getmem (sizeof (forall_info));
2918
 
2919
  gfc_start_block (&pre);
2920
  gfc_init_block (&post);
2921
  gfc_init_block (&block);
2922
 
2923
  n = 0;
2924
  for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2925
    {
2926
      gfc_symbol *sym = fa->var->symtree->n.sym;
2927
 
2928
      /* Allocate space for this_forall.  */
2929
      this_forall = (iter_info *) gfc_getmem (sizeof (iter_info));
2930
 
2931
      /* Create a temporary variable for the FORALL index.  */
2932
      tmp = gfc_typenode_for_spec (&sym->ts);
2933
      var[n] = gfc_create_var (tmp, sym->name);
2934
      gfc_shadow_sym (sym, var[n], &saved_vars[n]);
2935
 
2936
      /* Record it in this_forall.  */
2937
      this_forall->var = var[n];
2938
 
2939
      /* Replace the index symbol's backend_decl with the temporary decl.  */
2940
      sym->backend_decl = var[n];
2941
 
2942
      /* Work out the start, end and stride for the loop.  */
2943
      gfc_init_se (&se, NULL);
2944
      gfc_conv_expr_val (&se, fa->start);
2945
      /* Record it in this_forall.  */
2946
      this_forall->start = se.expr;
2947
      gfc_add_block_to_block (&block, &se.pre);
2948
      start[n] = se.expr;
2949
 
2950
      gfc_init_se (&se, NULL);
2951
      gfc_conv_expr_val (&se, fa->end);
2952
      /* Record it in this_forall.  */
2953
      this_forall->end = se.expr;
2954
      gfc_make_safe_expr (&se);
2955
      gfc_add_block_to_block (&block, &se.pre);
2956
      end[n] = se.expr;
2957
 
2958
      gfc_init_se (&se, NULL);
2959
      gfc_conv_expr_val (&se, fa->stride);
2960
      /* Record it in this_forall.  */
2961
      this_forall->step = se.expr;
2962
      gfc_make_safe_expr (&se);
2963
      gfc_add_block_to_block (&block, &se.pre);
2964
      step[n] = se.expr;
2965
 
2966
      /* Set the NEXT field of this_forall to NULL.  */
2967
      this_forall->next = NULL;
2968
      /* Link this_forall to the info construct.  */
2969
      if (info->this_loop)
2970
        {
2971
          iter_info *iter_tmp = info->this_loop;
2972
          while (iter_tmp->next != NULL)
2973
            iter_tmp = iter_tmp->next;
2974
          iter_tmp->next = this_forall;
2975
        }
2976
      else
2977
        info->this_loop = this_forall;
2978
 
2979
      n++;
2980
    }
2981
  nvar = n;
2982
 
2983
  /* Calculate the size needed for the current forall level.  */
2984
  size = gfc_index_one_node;
2985
  for (n = 0; n < nvar; n++)
2986
    {
2987
      /* size = (end + step - start) / step.  */
2988
      tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (start[n]),
2989
                         step[n], start[n]);
2990
      tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (end[n]), end[n], tmp);
2991
 
2992
      tmp = fold_build2 (FLOOR_DIV_EXPR, TREE_TYPE (tmp), tmp, step[n]);
2993
      tmp = convert (gfc_array_index_type, tmp);
2994
 
2995
      size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
2996
    }
2997
 
2998
  /* Record the nvar and size of current forall level.  */
2999
  info->nvar = nvar;
3000
  info->size = size;
3001
 
3002
  if (code->expr1)
3003
    {
3004
      /* If the mask is .true., consider the FORALL unconditional.  */
3005
      if (code->expr1->expr_type == EXPR_CONSTANT
3006
          && code->expr1->value.logical)
3007
        need_mask = false;
3008
      else
3009
        need_mask = true;
3010
    }
3011
  else
3012
    need_mask = false;
3013
 
3014
  /* First we need to allocate the mask.  */
3015
  if (need_mask)
3016
    {
3017
      /* As the mask array can be very big, prefer compact boolean types.  */
3018
      tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3019
      mask = allocate_temp_for_forall_nest (nested_forall_info, mask_type,
3020
                                            size, NULL, &block, &pmask);
3021
      maskindex = gfc_create_var_np (gfc_array_index_type, "mi");
3022
 
3023
      /* Record them in the info structure.  */
3024
      info->maskindex = maskindex;
3025
      info->mask = mask;
3026
    }
3027
  else
3028
    {
3029
      /* No mask was specified.  */
3030
      maskindex = NULL_TREE;
3031
      mask = pmask = NULL_TREE;
3032
    }
3033
 
3034
  /* Link the current forall level to nested_forall_info.  */
3035
  info->prev_nest = nested_forall_info;
3036
  nested_forall_info = info;
3037
 
3038
  /* Copy the mask into a temporary variable if required.
3039
     For now we assume a mask temporary is needed.  */
3040
  if (need_mask)
3041
    {
3042
      /* As the mask array can be very big, prefer compact boolean types.  */
3043
      tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3044
 
3045
      gfc_add_modify (&block, maskindex, gfc_index_zero_node);
3046
 
3047
      /* Start of mask assignment loop body.  */
3048
      gfc_start_block (&body);
3049
 
3050
      /* Evaluate the mask expression.  */
3051
      gfc_init_se (&se, NULL);
3052
      gfc_conv_expr_val (&se, code->expr1);
3053
      gfc_add_block_to_block (&body, &se.pre);
3054
 
3055
      /* Store the mask.  */
3056
      se.expr = convert (mask_type, se.expr);
3057
 
3058
      tmp = gfc_build_array_ref (mask, maskindex, NULL);
3059
      gfc_add_modify (&body, tmp, se.expr);
3060
 
3061
      /* Advance to the next mask element.  */
3062
      tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3063
                         maskindex, gfc_index_one_node);
3064
      gfc_add_modify (&body, maskindex, tmp);
3065
 
3066
      /* Generate the loops.  */
3067
      tmp = gfc_finish_block (&body);
3068
      tmp = gfc_trans_nested_forall_loop (info, tmp, 0);
3069
      gfc_add_expr_to_block (&block, tmp);
3070
    }
3071
 
3072
  c = code->block->next;
3073
 
3074
  /* TODO: loop merging in FORALL statements.  */
3075
  /* Now that we've got a copy of the mask, generate the assignment loops.  */
3076
  while (c)
3077
    {
3078
      switch (c->op)
3079
        {
3080
        case EXEC_ASSIGN:
3081
          /* A scalar or array assignment.  Do the simple check for
3082
             lhs to rhs dependencies.  These make a temporary for the
3083
             rhs and form a second forall block to copy it to the variable.  */
3084
          need_temp = check_forall_dependencies(c, &pre, &post);
3085
 
3086
          /* Temporaries due to array assignment data dependencies introduce
3087
             no end of problems.  */
3088
          if (need_temp)
3089
            gfc_trans_assign_need_temp (c->expr1, c->expr2, NULL, false,
3090
                                        nested_forall_info, &block);
3091
          else
3092
            {
3093
              /* Use the normal assignment copying routines.  */
3094
              assign = gfc_trans_assignment (c->expr1, c->expr2, false);
3095
 
3096
              /* Generate body and loops.  */
3097
              tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3098
                                                  assign, 1);
3099
              gfc_add_expr_to_block (&block, tmp);
3100
            }
3101
 
3102
          /* Clean up any temporary symtrees that have been made to deal
3103
             with dependencies.  */
3104
          if (new_symtree)
3105
            cleanup_forall_symtrees (c);
3106
 
3107
          break;
3108
 
3109
        case EXEC_WHERE:
3110
          /* Translate WHERE or WHERE construct nested in FORALL.  */
3111
          gfc_trans_where_2 (c, NULL, false, nested_forall_info, &block);
3112
          break;
3113
 
3114
        /* Pointer assignment inside FORALL.  */
3115
        case EXEC_POINTER_ASSIGN:
3116
          need_temp = gfc_check_dependency (c->expr1, c->expr2, 0);
3117
          if (need_temp)
3118
            gfc_trans_pointer_assign_need_temp (c->expr1, c->expr2,
3119
                                                nested_forall_info, &block);
3120
          else
3121
            {
3122
              /* Use the normal assignment copying routines.  */
3123
              assign = gfc_trans_pointer_assignment (c->expr1, c->expr2);
3124
 
3125
              /* Generate body and loops.  */
3126
              tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3127
                                                  assign, 1);
3128
              gfc_add_expr_to_block (&block, tmp);
3129
            }
3130
          break;
3131
 
3132
        case EXEC_FORALL:
3133
          tmp = gfc_trans_forall_1 (c, nested_forall_info);
3134
          gfc_add_expr_to_block (&block, tmp);
3135
          break;
3136
 
3137
        /* Explicit subroutine calls are prevented by the frontend but interface
3138
           assignments can legitimately produce them.  */
3139
        case EXEC_ASSIGN_CALL:
3140
          assign = gfc_trans_call (c, true, NULL_TREE, NULL_TREE, false);
3141
          tmp = gfc_trans_nested_forall_loop (nested_forall_info, assign, 1);
3142
          gfc_add_expr_to_block (&block, tmp);
3143
          break;
3144
 
3145
        default:
3146
          gcc_unreachable ();
3147
        }
3148
 
3149
      c = c->next;
3150
    }
3151
 
3152
  /* Restore the original index variables.  */
3153
  for (fa = code->ext.forall_iterator, n = 0; fa; fa = fa->next, n++)
3154
    gfc_restore_sym (fa->var->symtree->n.sym, &saved_vars[n]);
3155
 
3156
  /* Free the space for var, start, end, step, varexpr.  */
3157
  gfc_free (var);
3158
  gfc_free (start);
3159
  gfc_free (end);
3160
  gfc_free (step);
3161
  gfc_free (varexpr);
3162
  gfc_free (saved_vars);
3163
 
3164
  /* Free the space for this forall_info.  */
3165
  gfc_free (info);
3166
 
3167
  if (pmask)
3168
    {
3169
      /* Free the temporary for the mask.  */
3170
      tmp = gfc_call_free (pmask);
3171
      gfc_add_expr_to_block (&block, tmp);
3172
    }
3173
  if (maskindex)
3174
    pushdecl (maskindex);
3175
 
3176
  gfc_add_block_to_block (&pre, &block);
3177
  gfc_add_block_to_block (&pre, &post);
3178
 
3179
  return gfc_finish_block (&pre);
3180
}
3181
 
3182
 
3183
/* Translate the FORALL statement or construct.  */
3184
 
3185
tree gfc_trans_forall (gfc_code * code)
3186
{
3187
  return gfc_trans_forall_1 (code, NULL);
3188
}
3189
 
3190
 
3191
/* Evaluate the WHERE mask expression, copy its value to a temporary.
3192
   If the WHERE construct is nested in FORALL, compute the overall temporary
3193
   needed by the WHERE mask expression, multiplied by the number of
3194
   iterations of the nested FORALL.
3195
   ME is the WHERE mask expression.
3196
   MASK is the current execution mask upon input, whose sense may or may
3197
   not be inverted as specified by the INVERT argument.
3198
   CMASK is the updated execution mask on output, or NULL if not required.
3199
   PMASK is the pending execution mask on output, or NULL if not required.
3200
   BLOCK is the block in which to place the condition evaluation loops.  */
3201
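/* A rough sketch of the per-element code this function produces (for
   illustration only, not the literal generated tree):

       cond = <ME element>;
       mtmp = invert ? !mask[count] : mask[count];      (only if MASK given)
       if (cmask)  cmask[count] = mask ? mtmp && cond  : cond;
       if (pmask)  pmask[count] = mask ? mtmp && !cond : !cond;
       count = count + 1;

   wrapped in the scalarization loop over ME and, when nested in FORALL,
   in the surrounding FORALL loops as well.  */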
 
3202
static void
3203
gfc_evaluate_where_mask (gfc_expr * me, forall_info * nested_forall_info,
3204
                         tree mask, bool invert, tree cmask, tree pmask,
3205
                         tree mask_type, stmtblock_t * block)
3206
{
3207
  tree tmp, tmp1;
3208
  gfc_ss *lss, *rss;
3209
  gfc_loopinfo loop;
3210
  stmtblock_t body, body1;
3211
  tree count, cond, mtmp;
3212
  gfc_se lse, rse;
3213
 
3214
  gfc_init_loopinfo (&loop);
3215
 
3216
  lss = gfc_walk_expr (me);
3217
  rss = gfc_walk_expr (me);
3218
 
3219
  /* Variable to index the temporary.  */
3220
  count = gfc_create_var (gfc_array_index_type, "count");
3221
  /* Initialize count.  */
3222
  gfc_add_modify (block, count, gfc_index_zero_node);
3223
 
3224
  gfc_start_block (&body);
3225
 
3226
  gfc_init_se (&rse, NULL);
3227
  gfc_init_se (&lse, NULL);
3228
 
3229
  if (lss == gfc_ss_terminator)
3230
    {
3231
      gfc_init_block (&body1);
3232
    }
3233
  else
3234
    {
3235
      /* Initialize the loop.  */
3236
      gfc_init_loopinfo (&loop);
3237
 
3238
      /* We may need LSS to determine the shape of the expression.  */
3239
      gfc_add_ss_to_loop (&loop, lss);
3240
      gfc_add_ss_to_loop (&loop, rss);
3241
 
3242
      gfc_conv_ss_startstride (&loop);
3243
      gfc_conv_loop_setup (&loop, &me->where);
3244
 
3245
      gfc_mark_ss_chain_used (rss, 1);
3246
      /* Start the loop body.  */
3247
      gfc_start_scalarized_body (&loop, &body1);
3248
 
3249
      /* Translate the expression.  */
3250
      gfc_copy_loopinfo_to_se (&rse, &loop);
3251
      rse.ss = rss;
3252
      gfc_conv_expr (&rse, me);
3253
    }
3254
 
3255
  /* Variable to evaluate mask condition.  */
3256
  cond = gfc_create_var (mask_type, "cond");
3257
  if (mask && (cmask || pmask))
3258
    mtmp = gfc_create_var (mask_type, "mask");
3259
  else
    mtmp = NULL_TREE;
3260
 
3261
  gfc_add_block_to_block (&body1, &lse.pre);
3262
  gfc_add_block_to_block (&body1, &rse.pre);
3263
 
3264
  gfc_add_modify (&body1, cond, fold_convert (mask_type, rse.expr));
3265
 
3266
  if (mask && (cmask || pmask))
3267
    {
3268
      tmp = gfc_build_array_ref (mask, count, NULL);
3269
      if (invert)
3270
        tmp = fold_build1 (TRUTH_NOT_EXPR, mask_type, tmp);
3271
      gfc_add_modify (&body1, mtmp, tmp);
3272
    }
3273
 
3274
  if (cmask)
3275
    {
3276
      tmp1 = gfc_build_array_ref (cmask, count, NULL);
3277
      tmp = cond;
3278
      if (mask)
3279
        tmp = fold_build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
3280
      gfc_add_modify (&body1, tmp1, tmp);
3281
    }
3282
 
3283
  if (pmask)
3284
    {
3285
      tmp1 = gfc_build_array_ref (pmask, count, NULL);
3286
      tmp = fold_build1 (TRUTH_NOT_EXPR, mask_type, cond);
3287
      if (mask)
3288
        tmp = fold_build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
3289
      gfc_add_modify (&body1, tmp1, tmp);
3290
    }
3291
 
3292
  gfc_add_block_to_block (&body1, &lse.post);
3293
  gfc_add_block_to_block (&body1, &rse.post);
3294
 
3295
  if (lss == gfc_ss_terminator)
3296
    {
3297
      gfc_add_block_to_block (&body, &body1);
3298
    }
3299
  else
3300
    {
3301
      /* Increment count.  */
3302
      tmp1 = fold_build2 (PLUS_EXPR, gfc_array_index_type, count,
3303
                          gfc_index_one_node);
3304
      gfc_add_modify (&body1, count, tmp1);
3305
 
3306
      /* Generate the copying loops.  */
3307
      gfc_trans_scalarizing_loops (&loop, &body1);
3308
 
3309
      gfc_add_block_to_block (&body, &loop.pre);
3310
      gfc_add_block_to_block (&body, &loop.post);
3311
 
3312
      gfc_cleanup_loop (&loop);
3313
      /* TODO: Reuse lss and rss when copying temp->lhs.  Need to be careful
3314
         as tree nodes in SS may not be valid in different scope.  */
3315
    }
3316
 
3317
  tmp1 = gfc_finish_block (&body);
3318
  /* If the WHERE construct is inside FORALL, fill the full temporary.  */
3319
  if (nested_forall_info != NULL)
3320
    tmp1 = gfc_trans_nested_forall_loop (nested_forall_info, tmp1, 1);
3321
 
3322
  gfc_add_expr_to_block (block, tmp1);
3323
}
3324
 
3325
 
3326
/* Translate an assignment statement in a WHERE statement or WHERE
3327
   construct.  The MASK expression is used to control which elements
3328
   of EXPR1 shall be assigned.  The sense of MASK is specified by
3329
   INVERT.  */
3330
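/* Roughly, and for illustration only, the per-element code generated here
   is

       if (invert ? !mask[count1] : mask[count1])
         lhs = rhs;
       count1 = count1 + 1;

   with COUNT2 indexing the mask again during the second pass that copies
   a temporary back to the lhs when the scalarizer had to break a
   dependency between EXPR1 and EXPR2.  */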
 
3331
static tree
3332
gfc_trans_where_assign (gfc_expr *expr1, gfc_expr *expr2,
3333
                        tree mask, bool invert,
3334
                        tree count1, tree count2,
3335
                        gfc_code *cnext)
3336
{
3337
  gfc_se lse;
3338
  gfc_se rse;
3339
  gfc_ss *lss;
3340
  gfc_ss *lss_section;
3341
  gfc_ss *rss;
3342
 
3343
  gfc_loopinfo loop;
3344
  tree tmp;
3345
  stmtblock_t block;
3346
  stmtblock_t body;
3347
  tree index, maskexpr;
3348
 
3349
  /* A defined assignment. */
3350
  if (cnext && cnext->resolved_sym)
3351
    return gfc_trans_call (cnext, true, mask, count1, invert);
3352
 
3353
#if 0
3354
  /* TODO: handle this special case.
3355
     Special case a single function returning an array.  */
3356
  if (expr2->expr_type == EXPR_FUNCTION && expr2->rank > 0)
3357
    {
3358
      tmp = gfc_trans_arrayfunc_assign (expr1, expr2);
3359
      if (tmp)
3360
        return tmp;
3361
    }
3362
#endif
3363
 
3364
  /* Assignment of the form lhs = rhs.  */
3365
  gfc_start_block (&block);
3366
 
3367
  gfc_init_se (&lse, NULL);
3368
  gfc_init_se (&rse, NULL);
3369
 
3370
  /* Walk the lhs.  */
3371
  lss = gfc_walk_expr (expr1);
3372
  rss = NULL;
3373
 
3374
  /* In each where-assign-stmt, the mask-expr and the variable being
3375
     defined shall be arrays of the same shape.  */
3376
  gcc_assert (lss != gfc_ss_terminator);
3377
 
3378
  /* The assignment needs scalarization.  */
3379
  lss_section = lss;
3380
 
3381
  /* Find a non-scalar SS from the lhs.  */
3382
  while (lss_section != gfc_ss_terminator
3383
         && lss_section->type != GFC_SS_SECTION)
3384
    lss_section = lss_section->next;
3385
 
3386
  gcc_assert (lss_section != gfc_ss_terminator);
3387
 
3388
  /* Initialize the scalarizer.  */
3389
  gfc_init_loopinfo (&loop);
3390
 
3391
  /* Walk the rhs.  */
3392
  rss = gfc_walk_expr (expr2);
3393
  if (rss == gfc_ss_terminator)
3394
   {
3395
     /* The rhs is scalar.  Add an SS for the expression.  */
3396
     rss = gfc_get_ss ();
3397
     rss->where = 1;
3398
     rss->next = gfc_ss_terminator;
3399
     rss->type = GFC_SS_SCALAR;
3400
     rss->expr = expr2;
3401
    }
3402
 
3403
  /* Associate the SS with the loop.  */
3404
  gfc_add_ss_to_loop (&loop, lss);
3405
  gfc_add_ss_to_loop (&loop, rss);
3406
 
3407
  /* Calculate the bounds of the scalarization.  */
3408
  gfc_conv_ss_startstride (&loop);
3409
 
3410
  /* Resolve any data dependencies in the statement.  */
3411
  gfc_conv_resolve_dependencies (&loop, lss_section, rss);
3412
 
3413
  /* Setup the scalarizing loops.  */
3414
  gfc_conv_loop_setup (&loop, &expr2->where);
3415
 
3416
  /* Setup the gfc_se structures.  */
3417
  gfc_copy_loopinfo_to_se (&lse, &loop);
3418
  gfc_copy_loopinfo_to_se (&rse, &loop);
3419
 
3420
  rse.ss = rss;
3421
  gfc_mark_ss_chain_used (rss, 1);
3422
  if (loop.temp_ss == NULL)
3423
    {
3424
      lse.ss = lss;
3425
      gfc_mark_ss_chain_used (lss, 1);
3426
    }
3427
  else
3428
    {
3429
      lse.ss = loop.temp_ss;
3430
      gfc_mark_ss_chain_used (lss, 3);
3431
      gfc_mark_ss_chain_used (loop.temp_ss, 3);
3432
    }
3433
 
3434
  /* Start the scalarized loop body.  */
3435
  gfc_start_scalarized_body (&loop, &body);
3436
 
3437
  /* Translate the expression.  */
3438
  gfc_conv_expr (&rse, expr2);
3439
  if (lss != gfc_ss_terminator && loop.temp_ss != NULL)
3440
    {
3441
      gfc_conv_tmp_array_ref (&lse);
3442
      gfc_advance_se_ss_chain (&lse);
3443
    }
3444
  else
3445
    gfc_conv_expr (&lse, expr1);
3446
 
3447
  /* Form the mask expression according to the mask.  */
3448
  index = count1;
3449
  maskexpr = gfc_build_array_ref (mask, index, NULL);
3450
  if (invert)
3451
    maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr), maskexpr);
3452
 
3453
  /* Use the scalar assignment as is.  */
3454
  tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts,
3455
                                 loop.temp_ss != NULL, false);
3456
 
3457
  tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt (input_location));
3458
 
3459
  gfc_add_expr_to_block (&body, tmp);
3460
 
3461
  if (lss == gfc_ss_terminator)
3462
    {
3463
      /* Increment count1.  */
3464
      tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3465
                         count1, gfc_index_one_node);
3466
      gfc_add_modify (&body, count1, tmp);
3467
 
3468
      /* Use the scalar assignment as is.  */
3469
      gfc_add_block_to_block (&block, &body);
3470
    }
3471
  else
3472
    {
3473
      gcc_assert (lse.ss == gfc_ss_terminator
3474
                  && rse.ss == gfc_ss_terminator);
3475
 
3476
      if (loop.temp_ss != NULL)
3477
        {
3478
          /* Increment count1 before finishing the main body of a scalarized
3479
             expression.  */
3480
          tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3481
                             count1, gfc_index_one_node);
3482
          gfc_add_modify (&body, count1, tmp);
3483
          gfc_trans_scalarized_loop_boundary (&loop, &body);
3484
 
3485
          /* We need to copy the temporary to the actual lhs.  */
3486
          gfc_init_se (&lse, NULL);
3487
          gfc_init_se (&rse, NULL);
3488
          gfc_copy_loopinfo_to_se (&lse, &loop);
3489
          gfc_copy_loopinfo_to_se (&rse, &loop);
3490
 
3491
          rse.ss = loop.temp_ss;
3492
          lse.ss = lss;
3493
 
3494
          gfc_conv_tmp_array_ref (&rse);
3495
          gfc_advance_se_ss_chain (&rse);
3496
          gfc_conv_expr (&lse, expr1);
3497
 
3498
          gcc_assert (lse.ss == gfc_ss_terminator
3499
                      && rse.ss == gfc_ss_terminator);
3500
 
3501
          /* Form the mask expression according to the mask tree list.  */
3502
          index = count2;
3503
          maskexpr = gfc_build_array_ref (mask, index, NULL);
3504
          if (invert)
3505
            maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr),
3506
                                    maskexpr);
3507
 
3508
          /* Use the scalar assignment as is.  */
3509
          tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts, false, false);
3510
          tmp = build3_v (COND_EXPR, maskexpr, tmp,
3511
                          build_empty_stmt (input_location));
3512
          gfc_add_expr_to_block (&body, tmp);
3513
 
3514
          /* Increment count2.  */
3515
          tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3516
                             count2, gfc_index_one_node);
3517
          gfc_add_modify (&body, count2, tmp);
3518
        }
3519
      else
3520
        {
3521
          /* Increment count1.  */
3522
          tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3523
                             count1, gfc_index_one_node);
3524
          gfc_add_modify (&body, count1, tmp);
3525
        }
3526
 
3527
      /* Generate the copying loops.  */
3528
      gfc_trans_scalarizing_loops (&loop, &body);
3529
 
3530
      /* Wrap the whole thing up.  */
3531
      gfc_add_block_to_block (&block, &loop.pre);
3532
      gfc_add_block_to_block (&block, &loop.post);
3533
      gfc_cleanup_loop (&loop);
3534
    }
3535
 
3536
  return gfc_finish_block (&block);
3537
}
3538
 
3539
 
3540
/* Translate the WHERE construct or statement.
3541
   This function can be called iteratively to translate the nested WHERE
3542
   construct or statement.
3543
   MASK is the control mask.  */
3544
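/* For illustration, a construct of the form

       WHERE (a > 0.0)
         b = 1.0
       ELSEWHERE (a < -1.0)
         b = 2.0
       ELSEWHERE
         b = 3.0
       END WHERE

   has conditional ELSEWHERE clauses, so both a current control mask
   (CMASK) and a pending control mask (PMASK) temporary are allocated
   below; a plain WHERE statement with no ELSEWHERE needs only CMASK.  */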
 
3545
static void
3546
gfc_trans_where_2 (gfc_code * code, tree mask, bool invert,
3547
                   forall_info * nested_forall_info, stmtblock_t * block)
3548
{
3549
  stmtblock_t inner_size_body;
3550
  tree inner_size, size;
3551
  gfc_ss *lss, *rss;
3552
  tree mask_type;
3553
  gfc_expr *expr1;
3554
  gfc_expr *expr2;
3555
  gfc_code *cblock;
3556
  gfc_code *cnext;
3557
  tree tmp;
3558
  tree cond;
3559
  tree count1, count2;
3560
  bool need_cmask;
3561
  bool need_pmask;
3562
  int need_temp;
3563
  tree pcmask = NULL_TREE;
3564
  tree ppmask = NULL_TREE;
3565
  tree cmask = NULL_TREE;
3566
  tree pmask = NULL_TREE;
3567
  gfc_actual_arglist *arg;
3568
 
3569
  /* The WHERE statement or the first clause of the WHERE construct.  */
3570
  cblock = code->block;
3571
 
3572
  /* As the mask array can be very big, prefer compact boolean types.  */
3573
  mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3574
 
3575
  /* Determine which temporary masks are needed.  */
3576
  if (!cblock->block)
3577
    {
3578
      /* One clause: No ELSEWHEREs.  */
3579
      need_cmask = (cblock->next != 0);
3580
      need_pmask = false;
3581
    }
3582
  else if (cblock->block->block)
3583
    {
3584
      /* Three or more clauses: Conditional ELSEWHEREs.  */
3585
      need_cmask = true;
3586
      need_pmask = true;
3587
    }
3588
  else if (cblock->next)
3589
    {
3590
      /* Two clauses, the first non-empty.  */
3591
      need_cmask = true;
3592
      need_pmask = (mask != NULL_TREE
3593
                    && cblock->block->next != 0);
3594
    }
3595
  else if (!cblock->block->next)
3596
    {
3597
      /* Two clauses, both empty.  */
3598
      need_cmask = false;
3599
      need_pmask = false;
3600
    }
3601
  /* Two clauses, the first empty, the second non-empty.  */
3602
  else if (mask)
3603
    {
3604
      need_cmask = (cblock->block->expr1 != 0);
3605
      need_pmask = true;
3606
    }
3607
  else
3608
    {
3609
      need_cmask = true;
3610
      need_pmask = false;
3611
    }
3612
 
3613
  if (need_cmask || need_pmask)
3614
    {
3615
      /* Calculate the size of temporary needed by the mask-expr.  */
3616
      gfc_init_block (&inner_size_body);
3617
      inner_size = compute_inner_temp_size (cblock->expr1, cblock->expr1,
3618
                                            &inner_size_body, &lss, &rss);
3619
 
3620
      /* Calculate the total size of temporary needed.  */
3621
      size = compute_overall_iter_number (nested_forall_info, inner_size,
3622
                                          &inner_size_body, block);
3623
 
3624
      /* Check whether the size is negative.  */
3625
      cond = fold_build2 (LE_EXPR, boolean_type_node, size,
3626
                          gfc_index_zero_node);
3627
      size = fold_build3 (COND_EXPR, gfc_array_index_type, cond,
3628
                          gfc_index_zero_node, size);
3629
      size = gfc_evaluate_now (size, block);
3630
 
3631
      /* Allocate temporary for WHERE mask if needed.  */
3632
      if (need_cmask)
3633
        cmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3634
                                                 &pcmask);
3635
 
3636
      /* Allocate temporary for !mask if needed.  */
3637
      if (need_pmask)
3638
        pmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3639
                                                 &ppmask);
3640
    }
3641
 
3642
  while (cblock)
3643
    {
3644
      /* Each time around this loop, the where clause is conditional
3645
         on the value of mask and invert, which are updated at the
3646
         bottom of the loop.  */
3647
 
3648
      /* Has mask-expr.  */
3649
      if (cblock->expr1)
3650
        {
3651
          /* Ensure that the WHERE mask will be evaluated exactly once.
3652
             If there are no statements in this WHERE/ELSEWHERE clause,
3653
             then we don't need to update the control mask (cmask).
3654
             If this is the last clause of the WHERE construct, then
3655
             we don't need to update the pending control mask (pmask).  */
3656
          if (mask)
3657
            gfc_evaluate_where_mask (cblock->expr1, nested_forall_info,
3658
                                     mask, invert,
3659
                                     cblock->next  ? cmask : NULL_TREE,
3660
                                     cblock->block ? pmask : NULL_TREE,
3661
                                     mask_type, block);
3662
          else
3663
            gfc_evaluate_where_mask (cblock->expr1, nested_forall_info,
3664
                                     NULL_TREE, false,
3665
                                     (cblock->next || cblock->block)
3666
                                     ? cmask : NULL_TREE,
3667
                                     NULL_TREE, mask_type, block);
3668
 
3669
          invert = false;
3670
        }
3671
      /* It's a final elsewhere-stmt. No mask-expr is present.  */
3672
      else
3673
        cmask = mask;
3674
 
3675
      /* The body of this where clause is controlled by cmask, with the
3676
         sense specified by invert.  */
3677
 
3678
      /* Get the assignment statement of a WHERE statement, or the first
3679
         statement in where-body-construct of a WHERE construct.  */
3680
      cnext = cblock->next;
3681
      while (cnext)
3682
        {
3683
          switch (cnext->op)
3684
            {
3685
            /* WHERE assignment statement.  */
3686
            case EXEC_ASSIGN_CALL:
3687
 
3688
              arg = cnext->ext.actual;
3689
              expr1 = expr2 = NULL;
3690
              for (; arg; arg = arg->next)
3691
                {
3692
                  if (!arg->expr)
3693
                    continue;
3694
                  if (expr1 == NULL)
3695
                    expr1 = arg->expr;
3696
                  else
3697
                    expr2 = arg->expr;
3698
                }
3699
              goto evaluate;
3700
 
3701
            case EXEC_ASSIGN:
3702
              expr1 = cnext->expr1;
3703
              expr2 = cnext->expr2;
3704
    evaluate:
3705
              if (nested_forall_info != NULL)
3706
                {
3707
                  need_temp = gfc_check_dependency (expr1, expr2, 0);
3708
                  if (need_temp && cnext->op != EXEC_ASSIGN_CALL)
3709
                    gfc_trans_assign_need_temp (expr1, expr2,
3710
                                                cmask, invert,
3711
                                                nested_forall_info, block);
3712
                  else
3713
                    {
3714
                      /* Variables to control maskexpr.  */
3715
                      count1 = gfc_create_var (gfc_array_index_type, "count1");
3716
                      count2 = gfc_create_var (gfc_array_index_type, "count2");
3717
                      gfc_add_modify (block, count1, gfc_index_zero_node);
3718
                      gfc_add_modify (block, count2, gfc_index_zero_node);
3719
 
3720
                      tmp = gfc_trans_where_assign (expr1, expr2,
3721
                                                    cmask, invert,
3722
                                                    count1, count2,
3723
                                                    cnext);
3724
 
3725
                      tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3726
                                                          tmp, 1);
3727
                      gfc_add_expr_to_block (block, tmp);
3728
                    }
3729
                }
3730
              else
3731
                {
3732
                  /* Variables to control maskexpr.  */
3733
                  count1 = gfc_create_var (gfc_array_index_type, "count1");
3734
                  count2 = gfc_create_var (gfc_array_index_type, "count2");
3735
                  gfc_add_modify (block, count1, gfc_index_zero_node);
3736
                  gfc_add_modify (block, count2, gfc_index_zero_node);
3737
 
3738
                  tmp = gfc_trans_where_assign (expr1, expr2,
3739
                                                cmask, invert,
3740
                                                count1, count2,
3741
                                                cnext);
3742
                  gfc_add_expr_to_block (block, tmp);
3743
 
3744
                }
3745
              break;
3746
 
3747
            /* WHERE or WHERE construct is part of a where-body-construct.  */
3748
            case EXEC_WHERE:
3749
              gfc_trans_where_2 (cnext, cmask, invert,
3750
                                 nested_forall_info, block);
3751
              break;
3752
 
3753
            default:
3754
              gcc_unreachable ();
3755
            }
3756
 
3757
         /* The next statement within the same where-body-construct.  */
3758
         cnext = cnext->next;
3759
       }
3760
    /* The next masked-elsewhere-stmt, elsewhere-stmt, or end-where-stmt.  */
3761
    cblock = cblock->block;
3762
    if (mask == NULL_TREE)
3763
      {
3764
        /* If we're the initial WHERE, we can simply invert the sense
3765
           of the current mask to obtain the "mask" for the remaining
3766
           ELSEWHEREs.  */
3767
        invert = true;
3768
        mask = cmask;
3769
      }
3770
    else
3771
      {
3772
        /* Otherwise, for nested WHEREs we need to use the pending mask.  */
3773
        invert = false;
3774
        mask = pmask;
3775
      }
3776
  }
3777
 
3778
  /* If we allocated a pending mask array, deallocate it now.  */
3779
  if (ppmask)
3780
    {
3781
      tmp = gfc_call_free (ppmask);
3782
      gfc_add_expr_to_block (block, tmp);
3783
    }
3784
 
3785
  /* If we allocated a current mask array, deallocate it now.  */
3786
  if (pcmask)
3787
    {
3788
      tmp = gfc_call_free (pcmask);
3789
      gfc_add_expr_to_block (block, tmp);
3790
    }
3791
}
3792
 
3793
/* Translate a simple WHERE construct or statement without dependencies.
3794
   CBLOCK is the "then" clause of the WHERE statement, where CBLOCK->EXPR1
3795
   is the mask condition, and EBLOCK if non-NULL is the "else" clause.
3796
   Currently both CBLOCK and EBLOCK are restricted to single assignments.  */
3797
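/* For illustration, a dependence-free

       WHERE (c)
         a = b
       ELSEWHERE
         a = d
       END WHERE

   is translated here as a single scalarized loop whose body is a
   per-element conditional: a(i) gets b(i) where c(i) is true and d(i)
   otherwise, avoiding the mask temporaries used by gfc_trans_where_2.  */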
 
3798
static tree
3799
gfc_trans_where_3 (gfc_code * cblock, gfc_code * eblock)
3800
{
3801
  stmtblock_t block, body;
3802
  gfc_expr *cond, *tdst, *tsrc, *edst, *esrc;
3803
  tree tmp, cexpr, tstmt, estmt;
3804
  gfc_ss *css, *tdss, *tsss;
3805
  gfc_se cse, tdse, tsse, edse, esse;
3806
  gfc_loopinfo loop;
3807
  gfc_ss *edss = 0;
3808
  gfc_ss *esss = 0;
3809
 
3810
  /* Allow the scalarizer to workshare simple where loops.  */
3811
  if (ompws_flags & OMPWS_WORKSHARE_FLAG)
3812
    ompws_flags |= OMPWS_SCALARIZER_WS;
3813
 
3814
  cond = cblock->expr1;
3815
  tdst = cblock->next->expr1;
3816
  tsrc = cblock->next->expr2;
3817
  edst = eblock ? eblock->next->expr1 : NULL;
3818
  esrc = eblock ? eblock->next->expr2 : NULL;
3819
 
3820
  gfc_start_block (&block);
3821
  gfc_init_loopinfo (&loop);
3822
 
3823
  /* Handle the condition.  */
3824
  gfc_init_se (&cse, NULL);
3825
  css = gfc_walk_expr (cond);
3826
  gfc_add_ss_to_loop (&loop, css);
3827
 
3828
  /* Handle the then-clause.  */
3829
  gfc_init_se (&tdse, NULL);
3830
  gfc_init_se (&tsse, NULL);
3831
  tdss = gfc_walk_expr (tdst);
3832
  tsss = gfc_walk_expr (tsrc);
3833
  if (tsss == gfc_ss_terminator)
3834
    {
3835
      tsss = gfc_get_ss ();
3836
      tsss->where = 1;
3837
      tsss->next = gfc_ss_terminator;
3838
      tsss->type = GFC_SS_SCALAR;
3839
      tsss->expr = tsrc;
3840
    }
3841
  gfc_add_ss_to_loop (&loop, tdss);
3842
  gfc_add_ss_to_loop (&loop, tsss);
3843
 
3844
  if (eblock)
3845
    {
3846
      /* Handle the else clause.  */
3847
      gfc_init_se (&edse, NULL);
3848
      gfc_init_se (&esse, NULL);
3849
      edss = gfc_walk_expr (edst);
3850
      esss = gfc_walk_expr (esrc);
3851
      if (esss == gfc_ss_terminator)
3852
        {
3853
          esss = gfc_get_ss ();
3854
          esss->where = 1;
3855
          esss->next = gfc_ss_terminator;
3856
          esss->type = GFC_SS_SCALAR;
3857
          esss->expr = esrc;
3858
        }
3859
      gfc_add_ss_to_loop (&loop, edss);
3860
      gfc_add_ss_to_loop (&loop, esss);
3861
    }
3862
 
3863
  gfc_conv_ss_startstride (&loop);
3864
  gfc_conv_loop_setup (&loop, &tdst->where);
3865
 
3866
  gfc_mark_ss_chain_used (css, 1);
3867
  gfc_mark_ss_chain_used (tdss, 1);
3868
  gfc_mark_ss_chain_used (tsss, 1);
3869
  if (eblock)
3870
    {
3871
      gfc_mark_ss_chain_used (edss, 1);
3872
      gfc_mark_ss_chain_used (esss, 1);
3873
    }
3874
 
3875
  gfc_start_scalarized_body (&loop, &body);
3876
 
3877
  gfc_copy_loopinfo_to_se (&cse, &loop);
3878
  gfc_copy_loopinfo_to_se (&tdse, &loop);
3879
  gfc_copy_loopinfo_to_se (&tsse, &loop);
3880
  cse.ss = css;
3881
  tdse.ss = tdss;
3882
  tsse.ss = tsss;
3883
  if (eblock)
3884
    {
3885
      gfc_copy_loopinfo_to_se (&edse, &loop);
3886
      gfc_copy_loopinfo_to_se (&esse, &loop);
3887
      edse.ss = edss;
3888
      esse.ss = esss;
3889
    }
3890
 
3891
  gfc_conv_expr (&cse, cond);
3892
  gfc_add_block_to_block (&body, &cse.pre);
3893
  cexpr = cse.expr;
3894
 
3895
  gfc_conv_expr (&tsse, tsrc);
3896
  if (tdss != gfc_ss_terminator && loop.temp_ss != NULL)
3897
    {
3898
      gfc_conv_tmp_array_ref (&tdse);
3899
      gfc_advance_se_ss_chain (&tdse);
3900
    }
3901
  else
3902
    gfc_conv_expr (&tdse, tdst);
3903
 
3904
  if (eblock)
3905
    {
3906
      gfc_conv_expr (&esse, esrc);
3907
      if (edss != gfc_ss_terminator && loop.temp_ss != NULL)
3908
        {
3909
          gfc_conv_tmp_array_ref (&edse);
3910
          gfc_advance_se_ss_chain (&edse);
3911
        }
3912
      else
3913
        gfc_conv_expr (&edse, edst);
3914
    }
3915
 
3916
  tstmt = gfc_trans_scalar_assign (&tdse, &tsse, tdst->ts, false, false);
3917
  estmt = eblock ? gfc_trans_scalar_assign (&edse, &esse, edst->ts, false, false)
3918
                 : build_empty_stmt (input_location);
3919
  tmp = build3_v (COND_EXPR, cexpr, tstmt, estmt);
3920
  gfc_add_expr_to_block (&body, tmp);
3921
  gfc_add_block_to_block (&body, &cse.post);
3922
 
3923
  gfc_trans_scalarizing_loops (&loop, &body);
3924
  gfc_add_block_to_block (&block, &loop.pre);
3925
  gfc_add_block_to_block (&block, &loop.post);
3926
  gfc_cleanup_loop (&loop);
3927
 
3928
  return gfc_finish_block (&block);
3929
}
3930
 
3931
/* As the WHERE or WHERE construct statement can be nested, we call
3932
   gfc_trans_where_2 to do the translation, and pass the initial
3933
   NULL values for both the control mask and the pending control mask.  */
3934
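/* For illustration, the dispatcher below first looks for the two
   dependence-free single-assignment patterns

       WHERE (cond) x = y

       WHERE (cond)
         x1 = y1
       ELSEWHERE
         x2 = y2
       END WHERE

   and hands them to gfc_trans_where_3, which needs no mask temporaries;
   everything else goes through the general gfc_trans_where_2 path.  */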
 
3935
tree
3936
gfc_trans_where (gfc_code * code)
3937
{
3938
  stmtblock_t block;
3939
  gfc_code *cblock;
3940
  gfc_code *eblock;
3941
 
3942
  cblock = code->block;
3943
  if (cblock->next
3944
      && cblock->next->op == EXEC_ASSIGN
3945
      && !cblock->next->next)
3946
    {
3947
      eblock = cblock->block;
3948
      if (!eblock)
3949
        {
3950
          /* A simple "WHERE (cond) x = y" statement or block is
3951
             dependence free if cond is not dependent upon writing x,
3952
             and the source y is unaffected by the destination x.  */
3953
          if (!gfc_check_dependency (cblock->next->expr1,
3954
                                     cblock->expr1, 0)
3955
              && !gfc_check_dependency (cblock->next->expr1,
3956
                                        cblock->next->expr2, 0))
3957
            return gfc_trans_where_3 (cblock, NULL);
3958
        }
3959
      else if (!eblock->expr1
3960
               && !eblock->block
3961
               && eblock->next
3962
               && eblock->next->op == EXEC_ASSIGN
3963
               && !eblock->next->next)
3964
        {
3965
          /* A simple "WHERE (cond) x1 = y1 ELSEWHERE x2 = y2 ENDWHERE"
3966
             block is dependence free if cond is not dependent on writes
3967
             to x1 and x2, y1 is not dependent on writes to x2, and y2
3968
             is not dependent on writes to x1, and both y's are not
3969
             dependent upon their own x's.  In addition to this, the
3970
             final two dependency checks below exclude all but the same
3971
             array reference if the where and elsewhere destinations
3972
             are the same.  In short, this is VERY conservative and this
3973
             is needed because the two loops, required by the standard
3974
             are coalesced in gfc_trans_where_3.  */
3975
          if (!gfc_check_dependency(cblock->next->expr1,
3976
                                    cblock->expr1, 0)
3977
              && !gfc_check_dependency(eblock->next->expr1,
3978
                                       cblock->expr1, 0)
3979
              && !gfc_check_dependency(cblock->next->expr1,
3980
                                       eblock->next->expr2, 1)
3981
              && !gfc_check_dependency(eblock->next->expr1,
3982
                                       cblock->next->expr2, 1)
3983
              && !gfc_check_dependency(cblock->next->expr1,
3984
                                       cblock->next->expr2, 1)
3985
              && !gfc_check_dependency(eblock->next->expr1,
3986
                                       eblock->next->expr2, 1)
3987
              && !gfc_check_dependency(cblock->next->expr1,
3988
                                       eblock->next->expr1, 0)
3989
              && !gfc_check_dependency(eblock->next->expr1,
3990
                                       cblock->next->expr1, 0))
3991
            return gfc_trans_where_3 (cblock, eblock);
3992
        }
3993
    }
3994
 
3995
  gfc_start_block (&block);
3996
 
3997
  gfc_trans_where_2 (code, NULL, false, NULL, &block);
3998
 
3999
  return gfc_finish_block (&block);
4000
}
4001
 
4002
 
4003
/* CYCLE a DO loop. The label decl has already been created by
4004
   gfc_trans_do(), it's in TREE_PURPOSE (backend_decl) of the gfc_code
4005
   node at the head of the loop. We must mark the label as used.  */
4006
 
4007
tree
4008
gfc_trans_cycle (gfc_code * code)
4009
{
4010
  tree cycle_label;
4011
 
4012
  cycle_label = TREE_PURPOSE (code->ext.whichloop->backend_decl);
4013
  TREE_USED (cycle_label) = 1;
4014
  return build1_v (GOTO_EXPR, cycle_label);
4015
}
4016
 
4017
 
4018
/* EXIT a DO loop. Similar to CYCLE, but now the label is in
4019
   TREE_VALUE (backend_decl) of the gfc_code node at the head of the
4020
   loop.  */
4021
 
4022
tree
4023
gfc_trans_exit (gfc_code * code)
4024
{
4025
  tree exit_label;
4026
 
4027
  exit_label = TREE_VALUE (code->ext.whichloop->backend_decl);
4028
  TREE_USED (exit_label) = 1;
4029
  return build1_v (GOTO_EXPR, exit_label);
4030
}
4031
 
4032
 
4033
/* Translate the ALLOCATE statement.  */
4034
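/* For illustration, a statement such as

       ALLOCATE (x(n), STAT=ierr, ERRMSG=msg, SOURCE=y)

   is handled below by allocating each object in the list, branching to a
   shared error label when the library reports a failure so that STAT can
   be set, copying the SOURCE expression into the freshly allocated
   object, and, for CLASS objects, initializing the $vptr field.  */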
 
4035
tree
4036
gfc_trans_allocate (gfc_code * code)
4037
{
4038
  gfc_alloc *al;
4039
  gfc_expr *expr;
4040
  gfc_se se;
4041
  tree tmp;
4042
  tree parm;
4043
  tree stat;
4044
  tree pstat;
4045
  tree error_label;
4046
  tree memsz;
4047
  stmtblock_t block;
4048
 
4049
  if (!code->ext.alloc.list)
4050
    return NULL_TREE;
4051
 
4052
  pstat = stat = error_label = tmp = memsz = NULL_TREE;
4053
 
4054
  gfc_start_block (&block);
4055
 
4056
  /* STAT= and/or ERRMSG= is present.  */
4057
  if (code->expr1 || code->expr2)
4058
    {
4059
      tree gfc_int4_type_node = gfc_get_int_type (4);
4060
 
4061
      stat = gfc_create_var (gfc_int4_type_node, "stat");
4062
      pstat = gfc_build_addr_expr (NULL_TREE, stat);
4063
 
4064
      error_label = gfc_build_label_decl (NULL_TREE);
4065
      TREE_USED (error_label) = 1;
4066
    }
4067
 
4068
  for (al = code->ext.alloc.list; al != NULL; al = al->next)
4069
    {
4070
      expr = gfc_copy_expr (al->expr);
4071
 
4072
      if (expr->ts.type == BT_CLASS)
4073
        gfc_add_component_ref (expr, "$data");
4074
 
4075
      gfc_init_se (&se, NULL);
4076
      gfc_start_block (&se.pre);
4077
 
4078
      se.want_pointer = 1;
4079
      se.descriptor_only = 1;
4080
      gfc_conv_expr (&se, expr);
4081
 
4082
      if (!gfc_array_allocate (&se, expr, pstat))
4083
        {
4084
          /* A scalar or derived type.  */
4085
 
4086
          /* Determine allocate size.  */
4087
          if (code->expr3 && code->expr3->ts.type == BT_CLASS)
4088
            {
4089
              gfc_expr *sz;
4090
              gfc_se se_sz;
4091
              sz = gfc_copy_expr (code->expr3);
4092
              gfc_add_component_ref (sz, "$vptr");
4093
              gfc_add_component_ref (sz, "$size");
4094
              gfc_init_se (&se_sz, NULL);
4095
              gfc_conv_expr (&se_sz, sz);
4096
              gfc_free_expr (sz);
4097
              memsz = se_sz.expr;
4098
            }
4099
          else if (code->expr3 && code->expr3->ts.type != BT_CLASS)
4100
            memsz = TYPE_SIZE_UNIT (gfc_typenode_for_spec (&code->expr3->ts));
4101
          else if (code->ext.alloc.ts.type != BT_UNKNOWN)
4102
            memsz = TYPE_SIZE_UNIT (gfc_typenode_for_spec (&code->ext.alloc.ts));
4103
          else
4104
            memsz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (se.expr)));
4105
 
4106
          if (expr->ts.type == BT_CHARACTER && memsz == NULL_TREE)
4107
            memsz = se.string_length;
4108
 
4109
          /* Allocate - for non-pointers with re-alloc checking.  */
4110
          {
4111
            gfc_ref *ref;
4112
            bool allocatable;
4113
 
4114
            ref = expr->ref;
4115
 
4116
            /* Find the last reference in the chain.  */
4117
            while (ref && ref->next != NULL)
4118
              {
4119
                gcc_assert (ref->type != REF_ARRAY || ref->u.ar.type == AR_ELEMENT);
4120
                ref = ref->next;
4121
              }
4122
 
4123
            if (!ref)
4124
              allocatable = expr->symtree->n.sym->attr.allocatable;
4125
            else
4126
              allocatable = ref->u.c.component->attr.allocatable;
4127
 
4128
            if (allocatable)
4129
              tmp = gfc_allocate_array_with_status (&se.pre, se.expr, memsz,
4130
                                                    pstat, expr);
4131
            else
4132
              tmp = gfc_allocate_with_status (&se.pre, memsz, pstat);
4133
          }
4134
 
4135
          tmp = fold_build2 (MODIFY_EXPR, void_type_node, se.expr,
4136
                             fold_convert (TREE_TYPE (se.expr), tmp));
4137
          gfc_add_expr_to_block (&se.pre, tmp);
4138
 
4139
          if (code->expr1 || code->expr2)
4140
            {
4141
              tmp = build1_v (GOTO_EXPR, error_label);
4142
              parm = fold_build2 (NE_EXPR, boolean_type_node,
4143
                                  stat, build_int_cst (TREE_TYPE (stat), 0));
4144
              tmp = fold_build3 (COND_EXPR, void_type_node,
4145
                                 parm, tmp, build_empty_stmt (input_location));
4146
              gfc_add_expr_to_block (&se.pre, tmp);
4147
            }
4148
 
4149
          if (expr->ts.type == BT_DERIVED && expr->ts.u.derived->attr.alloc_comp)
4150
            {
4151
              tmp = build_fold_indirect_ref_loc (input_location, se.expr);
4152
              tmp = gfc_nullify_alloc_comp (expr->ts.u.derived, tmp, 0);
4153
              gfc_add_expr_to_block (&se.pre, tmp);
4154
            }
4155
 
4156
        }
4157
 
4158
      tmp = gfc_finish_block (&se.pre);
4159
      gfc_add_expr_to_block (&block, tmp);
4160
 
4161
      /* Initialization via SOURCE block.  */
4162
      if (code->expr3)
4163
        {
4164
          gfc_expr *rhs = gfc_copy_expr (code->expr3);
4165
          if (al->expr->ts.type == BT_CLASS)
4166
            {
4167
              gfc_se dst,src;
4168
              if (rhs->ts.type == BT_CLASS)
4169
                gfc_add_component_ref (rhs, "$data");
4170
              gfc_init_se (&dst, NULL);
4171
              gfc_init_se (&src, NULL);
4172
              gfc_conv_expr (&dst, expr);
4173
              gfc_conv_expr (&src, rhs);
4174
              gfc_add_block_to_block (&block, &src.pre);
4175
              tmp = gfc_build_memcpy_call (dst.expr, src.expr, memsz);
4176
            }
4177
          else
4178
            tmp = gfc_trans_assignment (gfc_expr_to_initialize (expr),
4179
                                        rhs, false);
4180
          gfc_free_expr (rhs);
4181
          gfc_add_expr_to_block (&block, tmp);
4182
        }
4183
 
4184
      /* Allocation of CLASS entities.  */
4185
      gfc_free_expr (expr);
4186
      expr = al->expr;
4187
      if (expr->ts.type == BT_CLASS)
4188
        {
4189
          gfc_expr *lhs,*rhs;
4190
          gfc_se lse;
4191
 
4192
          /* Initialize VPTR for CLASS objects.  */
4193
          lhs = gfc_expr_to_initialize (expr);
4194
          gfc_add_component_ref (lhs, "$vptr");
4195
          rhs = NULL;
4196
          if (code->expr3 && code->expr3->ts.type == BT_CLASS)
4197
            {
4198
              /* VPTR must be determined at run time.  */
4199
              rhs = gfc_copy_expr (code->expr3);
4200
              gfc_add_component_ref (rhs, "$vptr");
4201
              tmp = gfc_trans_pointer_assignment (lhs, rhs);
4202
              gfc_add_expr_to_block (&block, tmp);
4203
              gfc_free_expr (rhs);
4204
            }
4205
          else
4206
            {
4207
              /* VPTR is fixed at compile time.  */
4208
              gfc_symbol *vtab;
4209
              gfc_typespec *ts;
4210
              if (code->expr3)
4211
                ts = &code->expr3->ts;
4212
              else if (expr->ts.type == BT_DERIVED)
4213
                ts = &expr->ts;
4214
              else if (code->ext.alloc.ts.type == BT_DERIVED)
4215
                ts = &code->ext.alloc.ts;
4216
              else if (expr->ts.type == BT_CLASS)
4217
                ts = &expr->ts.u.derived->components->ts;
4218
              else
4219
                ts = &expr->ts;
4220
 
4221
              if (ts->type == BT_DERIVED)
4222
                {
4223
                  vtab = gfc_find_derived_vtab (ts->u.derived);
4224
                  gcc_assert (vtab);
4225
                  gfc_init_se (&lse, NULL);
4226
                  lse.want_pointer = 1;
4227
                  gfc_conv_expr (&lse, lhs);
4228
                  tmp = gfc_build_addr_expr (NULL_TREE,
4229
                                             gfc_get_symbol_decl (vtab));
4230
                  gfc_add_modify (&block, lse.expr,
4231
                        fold_convert (TREE_TYPE (lse.expr), tmp));
4232
                }
4233
            }
4234
        }
4235
 
4236
    }
4237
 
4238
  /* STAT block.  */
4239
  if (code->expr1)
4240
    {
4241
      tmp = build1_v (LABEL_EXPR, error_label);
4242
      gfc_add_expr_to_block (&block, tmp);
4243
 
4244
      gfc_init_se (&se, NULL);
4245
      gfc_conv_expr_lhs (&se, code->expr1);
4246
      tmp = convert (TREE_TYPE (se.expr), stat);
4247
      gfc_add_modify (&block, se.expr, tmp);
4248
    }
4249
 
4250
  /* ERRMSG block.  */
4251
  if (code->expr2)
4252
    {
4253
      /* A better error message may be possible, but not required.  */
4254
      const char *msg = "Attempt to allocate an allocated object";
4255
      tree errmsg, slen, dlen;
4256
 
4257
      gfc_init_se (&se, NULL);
4258
      gfc_conv_expr_lhs (&se, code->expr2);
4259
 
4260
      errmsg = gfc_create_var (pchar_type_node, "ERRMSG");
4261
 
4262
      gfc_add_modify (&block, errmsg,
4263
                gfc_build_addr_expr (pchar_type_node,
4264
                        gfc_build_localized_cstring_const (msg)));
4265
 
4266
      slen = build_int_cst (gfc_charlen_type_node, ((int) strlen (msg)));
4267
      dlen = gfc_get_expr_charlen (code->expr2);
4268
      slen = fold_build2 (MIN_EXPR, TREE_TYPE (slen), dlen, slen);
4269
 
4270
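      /* Copy at most MIN (strlen (msg), LEN (errmsg variable)) bytes so a
         short ERRMSG variable is not overrun.  */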
      dlen = build_call_expr_loc (input_location,
4271
                              built_in_decls[BUILT_IN_MEMCPY], 3,
4272
                gfc_build_addr_expr (pvoid_type_node, se.expr), errmsg, slen);
4273
 
4274
      tmp = fold_build2 (NE_EXPR, boolean_type_node, stat,
4275
                         build_int_cst (TREE_TYPE (stat), 0));
4276
 
4277
      tmp = build3_v (COND_EXPR, tmp, dlen, build_empty_stmt (input_location));
4278
 
4279
      gfc_add_expr_to_block (&block, tmp);
4280
    }
4281
 
4282
  return gfc_finish_block (&block);
4283
}
4284
 
4285
 
4286
/* Translate a DEALLOCATE statement.  */
4287
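/* For illustration, a statement such as

       DEALLOCATE (x, y, STAT=ierr)

   deallocates each object in turn, accumulates the per-object status in
   ASTAT, and copies ASTAT to IERR at the end; ERRMSG=, if present, is
   filled in only when ASTAT is nonzero.  */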
 
4288
tree
4289
gfc_trans_deallocate (gfc_code *code)
4290
{
4291
  gfc_se se;
4292
  gfc_alloc *al;
4293
  gfc_expr *expr;
4294
  tree apstat, astat, pstat, stat, tmp;
4295
  stmtblock_t block;
4296
 
4297
  pstat = apstat = stat = astat = tmp = NULL_TREE;
4298
 
4299
  gfc_start_block (&block);
4300
 
4301
  /* Count the number of failed deallocations.  If deallocate() was
4302
      called with STAT=, then set STAT to the count.  If deallocate
4303
      was called with ERRMSG, then set ERRMSG to a string.  */
4304
  if (code->expr1 || code->expr2)
4305
    {
4306
      tree gfc_int4_type_node = gfc_get_int_type (4);
4307
 
4308
      stat = gfc_create_var (gfc_int4_type_node, "stat");
4309
      pstat = gfc_build_addr_expr (NULL_TREE, stat);
4310
 
4311
      /* Running total of possible deallocation failures.  */
4312
      astat = gfc_create_var (gfc_int4_type_node, "astat");
4313
      apstat = gfc_build_addr_expr (NULL_TREE, astat);
4314
 
4315
      /* Initialize astat to 0.  */
4316
      gfc_add_modify (&block, astat, build_int_cst (TREE_TYPE (astat), 0));
4317
    }
4318
 
4319
  for (al = code->ext.alloc.list; al != NULL; al = al->next)
4320
    {
4321
      expr = al->expr;
4322
      gcc_assert (expr->expr_type == EXPR_VARIABLE);
4323
 
4324
      gfc_init_se (&se, NULL);
4325
      gfc_start_block (&se.pre);
4326
 
4327
      se.want_pointer = 1;
4328
      se.descriptor_only = 1;
4329
      gfc_conv_expr (&se, expr);
4330
 
4331
      if (expr->ts.type == BT_DERIVED && expr->ts.u.derived->attr.alloc_comp)
4332
        {
4333
          gfc_ref *ref;
4334
          gfc_ref *last = NULL;
4335
          for (ref = expr->ref; ref; ref = ref->next)
4336
            if (ref->type == REF_COMPONENT)
4337
              last = ref;
4338
 
4339
          /* Do not deallocate the components of a derived type
4340
             ultimate pointer component.  */
4341
          if (!(last && last->u.c.component->attr.pointer)
4342
                && !(!last && expr->symtree->n.sym->attr.pointer))
4343
            {
4344
              tmp = gfc_deallocate_alloc_comp (expr->ts.u.derived, se.expr,
4345
                                               expr->rank);
4346
              gfc_add_expr_to_block (&se.pre, tmp);
4347
            }
4348
        }
4349
 
4350
      if (expr->rank)
4351
        tmp = gfc_array_deallocate (se.expr, pstat, expr);
4352
      else
4353
        {
4354
          tmp = gfc_deallocate_with_status (se.expr, pstat, false, expr);
4355
          gfc_add_expr_to_block (&se.pre, tmp);
4356
 
4357
          tmp = fold_build2 (MODIFY_EXPR, void_type_node,
4358
                             se.expr, build_int_cst (TREE_TYPE (se.expr), 0));
4359
        }
4360
 
4361
      gfc_add_expr_to_block (&se.pre, tmp);
4362
 
4363
      /* Keep track of the number of failed deallocations by adding stat
4364
         of the last deallocation to the running total.  */
4365
      if (code->expr1 || code->expr2)
4366
        {
4367
          apstat = fold_build2 (PLUS_EXPR, TREE_TYPE (stat), astat, stat);
4368
          gfc_add_modify (&se.pre, astat, apstat);
4369
        }
4370
 
4371
      tmp = gfc_finish_block (&se.pre);
4372
      gfc_add_expr_to_block (&block, tmp);
4373
 
4374
    }
4375
 
4376
  /* Set STAT.  */
4377
  if (code->expr1)
4378
    {
4379
      gfc_init_se (&se, NULL);
4380
      gfc_conv_expr_lhs (&se, code->expr1);
4381
      tmp = convert (TREE_TYPE (se.expr), astat);
4382
      gfc_add_modify (&block, se.expr, tmp);
4383
    }
4384
 
4385
  /* Set ERRMSG.  */
4386
  if (code->expr2)
4387
    {
4388
      /* A better error message may be possible, but not required.  */
4389
      const char *msg = "Attempt to deallocate an unallocated object";
4390
      tree errmsg, slen, dlen;
4391
 
4392
      gfc_init_se (&se, NULL);
4393
      gfc_conv_expr_lhs (&se, code->expr2);
4394
 
4395
      errmsg = gfc_create_var (pchar_type_node, "ERRMSG");
4396
 
4397
      gfc_add_modify (&block, errmsg,
4398
                gfc_build_addr_expr (pchar_type_node,
4399
                        gfc_build_localized_cstring_const (msg)));
4400
 
4401
      slen = build_int_cst (gfc_charlen_type_node, ((int) strlen (msg)));
4402
      dlen = gfc_get_expr_charlen (code->expr2);
4403
      slen = fold_build2 (MIN_EXPR, TREE_TYPE (slen), dlen, slen);
4404
 
4405
      dlen = build_call_expr_loc (input_location,
4406
                              built_in_decls[BUILT_IN_MEMCPY], 3,
4407
                gfc_build_addr_expr (pvoid_type_node, se.expr), errmsg, slen);
4408
 
4409
      tmp = fold_build2 (NE_EXPR, boolean_type_node, astat,
4410
                         build_int_cst (TREE_TYPE (astat), 0));
4411
 
4412
      tmp = build3_v (COND_EXPR, tmp, dlen, build_empty_stmt (input_location));
4413
 
4414
      gfc_add_expr_to_block (&block, tmp);
4415
    }
4416
 
4417
  return gfc_finish_block (&block);
4418
}
4419
 
