/* Array translation routines
   Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Paul Brook <paul@nowt.org>
   and Steven Bosscher <s.bosscher@student.tudelft.nl>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* trans-array.c -- Various array related code, including scalarization,
                    allocation, initialization and other support routines.  */

/* How the scalarizer works.
   In gfortran, array expressions use the same core routines as scalar
   expressions.
   First, a Scalarization State (SS) chain is built.  This is done by walking
   the expression tree, and building a linear list of the terms in the
   expression.  As the tree is walked, scalar subexpressions are translated.

   The scalarization parameters are stored in a gfc_loopinfo structure.
   First the start and stride of each term is calculated by
   gfc_conv_ss_startstride.  During this process the expressions for the array
   descriptors and data pointers are also translated.

   If the expression is an assignment, we must then resolve any dependencies.
   In Fortran all the rhs values of an assignment must be evaluated before
   any assignments take place.  This can require a temporary array to store the
   values.  We also require a temporary when we are passing array expressions
   or vector subscripts as procedure parameters.

   Array sections are passed without copying to a temporary.  These use the
   scalarizer to determine the shape of the section.  The flag
   loop->array_parameter tells the scalarizer that the actual values and loop
   variables will not be required.

   The function gfc_conv_loop_setup generates the scalarization setup code.
   It determines the range of the scalarizing loop variables.  If a temporary
   is required, this is created and initialized.  Code for scalar expressions
   taken outside the loop is also generated at this time.  Next the offset and
   scaling required to translate from loop variables to array indices for each
   term is calculated.

   A call to gfc_start_scalarized_body marks the start of the scalarized
   expression.  This creates a scope and declares the loop variables.  Before
   calling this, gfc_mark_ss_chain_used must be used to indicate which terms
   will be used inside this loop.

   The scalar gfc_conv_* functions are then used to build the main body of the
   scalarization loop.  Scalarization loop variables and precalculated scalar
   values are automatically substituted.  Note that gfc_advance_se_ss_chain
   must be used, rather than changing the se->ss directly.

   For assignment expressions requiring a temporary, two sub loops are
   generated.  The first stores the result of the expression in the temporary,
   the second copies it to the result.  A call to
   gfc_trans_scalarized_loop_boundary marks the end of the main loop code and
   the start of the copying loop.  The temporary may be less than full rank.

   Finally gfc_trans_scalarizing_loops is called to generate the implicit do
   loops.  The loops are added to the pre chain of the loopinfo.  The post
   chain may still contain cleanup code.

   After the loop code has been added into its parent scope gfc_cleanup_loop
   is called to free all the SS allocated by the scalarizer.  */
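/* Illustrative sketch (added commentary, not part of the original sources):
   the description above corresponds roughly to the following driver
   sequence, using only functions defined in this file or declared in
   trans-array.h; dependency resolution and temporaries are omitted.
   gfc_trans_array_constructor_subarray below follows almost exactly this
   pattern.

     gfc_loopinfo loop;
     gfc_ss *ss;
     stmtblock_t body;

     ss = gfc_walk_expr (expr);                  walk -> SS chain
     gfc_init_loopinfo (&loop);
     gfc_add_ss_to_loop (&loop, ss);             associate chain with loop
     gfc_conv_ss_startstride (&loop);            starts, strides, descriptors
     gfc_conv_loop_setup (&loop, &expr->where);  loop bounds, temporaries
     gfc_mark_ss_chain_used (ss, 1);             terms used in main body
     gfc_start_scalarized_body (&loop, &body);
     ... scalar gfc_conv_* calls build the loop body ...
     gfc_trans_scalarizing_loops (&loop, &body);
     gfc_cleanup_loop (&loop);                                             */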
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "gimple.h"
#include "ggc.h"
#include "toplev.h"
#include "real.h"
#include "flags.h"
#include "gfortran.h"
#include "trans.h"
#include "trans-stmt.h"
#include "trans-types.h"
#include "trans-array.h"
#include "trans-const.h"
#include "dependency.h"

static gfc_ss *gfc_walk_subexpr (gfc_ss *, gfc_expr *);
static bool gfc_get_array_constructor_size (mpz_t *, gfc_constructor *);

/* The contents of this structure aren't actually used, just the address.  */
static gfc_ss gfc_ss_terminator_var;
gfc_ss * const gfc_ss_terminator = &gfc_ss_terminator_var;


static tree
gfc_array_dataptr_type (tree desc)
{
  return (GFC_TYPE_ARRAY_DATAPTR_TYPE (TREE_TYPE (desc)));
}

/* Build expressions to access the members of an array descriptor.
   It's surprisingly easy to mess up here, so never access
   an array descriptor by "brute force", always use these
   functions.  This also avoids problems if we change the format
   of an array descriptor.

   To understand these magic numbers, look at the comments
   before gfc_build_array_type() in trans-types.c.

   The code within these defines should be the only code which knows the
   format of an array descriptor.

   Any code just needing to obtain the bounds of an array should use
   gfc_conv_array_* rather than the following functions as these will return
   known constant values, and work with arrays which do not have descriptors.

   Don't forget to #undef these!  */

#define DATA_FIELD 0
#define OFFSET_FIELD 1
#define DTYPE_FIELD 2
#define DIMENSION_FIELD 3

#define STRIDE_SUBFIELD 0
#define LBOUND_SUBFIELD 1
#define UBOUND_SUBFIELD 2

/* This provides READ-ONLY access to the data field.  The field itself
   doesn't have the proper type.  */

tree
gfc_conv_descriptor_data_get (tree desc)
{
  tree field, type, t;

  type = TREE_TYPE (desc);
  gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));

  field = TYPE_FIELDS (type);
  gcc_assert (DATA_FIELD == 0);

  t = fold_build3 (COMPONENT_REF, TREE_TYPE (field), desc, field, NULL_TREE);
  t = fold_convert (GFC_TYPE_ARRAY_DATAPTR_TYPE (type), t);

  return t;
}

/* This provides WRITE access to the data field.  */

void
gfc_conv_descriptor_data_set (stmtblock_t *block, tree desc, tree value)
{
  tree field, type, t;

  type = TREE_TYPE (desc);
  gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));

  field = TYPE_FIELDS (type);
  gcc_assert (DATA_FIELD == 0);

  t = fold_build3 (COMPONENT_REF, TREE_TYPE (field), desc, field, NULL_TREE);
  gfc_add_modify (block, t, fold_convert (TREE_TYPE (field), value));
}


/* This provides address access to the data field.  This should only be
   used by array allocation, passing this on to the runtime.  */

tree
gfc_conv_descriptor_data_addr (tree desc)
{
  tree field, type, t;

  type = TREE_TYPE (desc);
  gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));

  field = TYPE_FIELDS (type);
  gcc_assert (DATA_FIELD == 0);

  t = fold_build3 (COMPONENT_REF, TREE_TYPE (field), desc, field, NULL_TREE);
  return gfc_build_addr_expr (NULL_TREE, t);
}

static tree
gfc_conv_descriptor_offset (tree desc)
{
  tree type;
  tree field;

  type = TREE_TYPE (desc);
  gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));

  field = gfc_advance_chain (TYPE_FIELDS (type), OFFSET_FIELD);
  gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);

  return fold_build3 (COMPONENT_REF, TREE_TYPE (field),
                      desc, field, NULL_TREE);
}

tree
gfc_conv_descriptor_offset_get (tree desc)
{
  return gfc_conv_descriptor_offset (desc);
}

void
gfc_conv_descriptor_offset_set (stmtblock_t *block, tree desc,
                                tree value)
{
  tree t = gfc_conv_descriptor_offset (desc);
  gfc_add_modify (block, t, fold_convert (TREE_TYPE (t), value));
}


tree
gfc_conv_descriptor_dtype (tree desc)
{
  tree field;
  tree type;

  type = TREE_TYPE (desc);
  gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));

  field = gfc_advance_chain (TYPE_FIELDS (type), DTYPE_FIELD);
  gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);

  return fold_build3 (COMPONENT_REF, TREE_TYPE (field),
                      desc, field, NULL_TREE);
}

static tree
gfc_conv_descriptor_dimension (tree desc, tree dim)
{
  tree field;
  tree type;
  tree tmp;

  type = TREE_TYPE (desc);
  gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));

  field = gfc_advance_chain (TYPE_FIELDS (type), DIMENSION_FIELD);
  gcc_assert (field != NULL_TREE
              && TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
              && TREE_CODE (TREE_TYPE (TREE_TYPE (field))) == RECORD_TYPE);

  tmp = fold_build3 (COMPONENT_REF, TREE_TYPE (field),
                     desc, field, NULL_TREE);
  tmp = gfc_build_array_ref (tmp, dim, NULL);
  return tmp;
}

static tree
gfc_conv_descriptor_stride (tree desc, tree dim)
{
  tree tmp;
  tree field;

  tmp = gfc_conv_descriptor_dimension (desc, dim);
  field = TYPE_FIELDS (TREE_TYPE (tmp));
  field = gfc_advance_chain (field, STRIDE_SUBFIELD);
  gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);

  tmp = fold_build3 (COMPONENT_REF, TREE_TYPE (field),
                     tmp, field, NULL_TREE);
  return tmp;
}

tree
gfc_conv_descriptor_stride_get (tree desc, tree dim)
{
  tree type = TREE_TYPE (desc);
  gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
  if (integer_zerop (dim)
      && GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ALLOCATABLE)
    return gfc_index_one_node;

  return gfc_conv_descriptor_stride (desc, dim);
}

void
gfc_conv_descriptor_stride_set (stmtblock_t *block, tree desc,
                                tree dim, tree value)
{
  tree t = gfc_conv_descriptor_stride (desc, dim);
  gfc_add_modify (block, t, fold_convert (TREE_TYPE (t), value));
}

static tree
gfc_conv_descriptor_lbound (tree desc, tree dim)
{
  tree tmp;
  tree field;

  tmp = gfc_conv_descriptor_dimension (desc, dim);
  field = TYPE_FIELDS (TREE_TYPE (tmp));
  field = gfc_advance_chain (field, LBOUND_SUBFIELD);
  gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);

  tmp = fold_build3 (COMPONENT_REF, TREE_TYPE (field),
                     tmp, field, NULL_TREE);
  return tmp;
}

tree
gfc_conv_descriptor_lbound_get (tree desc, tree dim)
{
  return gfc_conv_descriptor_lbound (desc, dim);
}

void
gfc_conv_descriptor_lbound_set (stmtblock_t *block, tree desc,
                                tree dim, tree value)
{
  tree t = gfc_conv_descriptor_lbound (desc, dim);
  gfc_add_modify (block, t, fold_convert (TREE_TYPE (t), value));
}

static tree
gfc_conv_descriptor_ubound (tree desc, tree dim)
{
  tree tmp;
  tree field;

  tmp = gfc_conv_descriptor_dimension (desc, dim);
  field = TYPE_FIELDS (TREE_TYPE (tmp));
  field = gfc_advance_chain (field, UBOUND_SUBFIELD);
  gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);

  tmp = fold_build3 (COMPONENT_REF, TREE_TYPE (field),
                     tmp, field, NULL_TREE);
  return tmp;
}

tree
gfc_conv_descriptor_ubound_get (tree desc, tree dim)
{
  return gfc_conv_descriptor_ubound (desc, dim);
}

void
gfc_conv_descriptor_ubound_set (stmtblock_t *block, tree desc,
                                tree dim, tree value)
{
  tree t = gfc_conv_descriptor_ubound (desc, dim);
  gfc_add_modify (block, t, fold_convert (TREE_TYPE (t), value));
}

/* Build a null array descriptor constructor.  */

tree
gfc_build_null_descriptor (tree type)
{
  tree field;
  tree tmp;

  gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
  gcc_assert (DATA_FIELD == 0);
  field = TYPE_FIELDS (type);

  /* Set a NULL data pointer.  */
  tmp = build_constructor_single (type, field, null_pointer_node);
  TREE_CONSTANT (tmp) = 1;
  /* All other fields are ignored.  */

  return tmp;
}


/* Cleanup those #defines.  */

#undef DATA_FIELD
#undef OFFSET_FIELD
#undef DTYPE_FIELD
#undef DIMENSION_FIELD
#undef STRIDE_SUBFIELD
#undef LBOUND_SUBFIELD
#undef UBOUND_SUBFIELD

|
|
/* Mark a SS chain as used. Flags specifies in which loops the SS is used.
|
397 |
|
|
flags & 1 = Main loop body.
|
398 |
|
|
flags & 2 = temp copy loop. */
|
399 |
|
|
|
400 |
|
|
void
|
401 |
|
|
gfc_mark_ss_chain_used (gfc_ss * ss, unsigned flags)
|
402 |
|
|
{
|
403 |
|
|
for (; ss != gfc_ss_terminator; ss = ss->next)
|
404 |
|
|
ss->useflags = flags;
|
405 |
|
|
}
|
406 |
|
|
|
407 |
|
|
static void gfc_free_ss (gfc_ss *);
|
408 |
|
|
|
409 |
|
|
|
410 |
|
|
/* Free a gfc_ss chain. */
|
411 |
|
|
|
412 |
|
|
static void
|
413 |
|
|
gfc_free_ss_chain (gfc_ss * ss)
|
414 |
|
|
{
|
415 |
|
|
gfc_ss *next;
|
416 |
|
|
|
417 |
|
|
while (ss != gfc_ss_terminator)
|
418 |
|
|
{
|
419 |
|
|
gcc_assert (ss != NULL);
|
420 |
|
|
next = ss->next;
|
421 |
|
|
gfc_free_ss (ss);
|
422 |
|
|
ss = next;
|
423 |
|
|
}
|
424 |
|
|
}
|
425 |
|
|
|
426 |
|
|
|
427 |
|
|
/* Free a SS. */
|
428 |
|
|
|
429 |
|
|
static void
|
430 |
|
|
gfc_free_ss (gfc_ss * ss)
|
431 |
|
|
{
|
432 |
|
|
int n;
|
433 |
|
|
|
434 |
|
|
switch (ss->type)
|
435 |
|
|
{
|
436 |
|
|
case GFC_SS_SECTION:
|
437 |
|
|
for (n = 0; n < GFC_MAX_DIMENSIONS; n++)
|
438 |
|
|
{
|
439 |
|
|
if (ss->data.info.subscript[n])
|
440 |
|
|
gfc_free_ss_chain (ss->data.info.subscript[n]);
|
441 |
|
|
}
|
442 |
|
|
break;
|
443 |
|
|
|
444 |
|
|
default:
|
445 |
|
|
break;
|
446 |
|
|
}
|
447 |
|
|
|
448 |
|
|
gfc_free (ss);
|
449 |
|
|
}
|
450 |
|
|
|
451 |
|
|
|
452 |
|
|
/* Free all the SS associated with a loop. */
|
453 |
|
|
|
454 |
|
|
void
|
455 |
|
|
gfc_cleanup_loop (gfc_loopinfo * loop)
|
456 |
|
|
{
|
457 |
|
|
gfc_ss *ss;
|
458 |
|
|
gfc_ss *next;
|
459 |
|
|
|
460 |
|
|
ss = loop->ss;
|
461 |
|
|
while (ss != gfc_ss_terminator)
|
462 |
|
|
{
|
463 |
|
|
gcc_assert (ss != NULL);
|
464 |
|
|
next = ss->loop_chain;
|
465 |
|
|
gfc_free_ss (ss);
|
466 |
|
|
ss = next;
|
467 |
|
|
}
|
468 |
|
|
}
|
469 |
|
|
|
470 |
|
|
|
471 |
|
|
/* Associate a SS chain with a loop. */
|
472 |
|
|
|
473 |
|
|
void
|
474 |
|
|
gfc_add_ss_to_loop (gfc_loopinfo * loop, gfc_ss * head)
|
475 |
|
|
{
|
476 |
|
|
gfc_ss *ss;
|
477 |
|
|
|
478 |
|
|
if (head == gfc_ss_terminator)
|
479 |
|
|
return;
|
480 |
|
|
|
481 |
|
|
ss = head;
|
482 |
|
|
for (; ss && ss != gfc_ss_terminator; ss = ss->next)
|
483 |
|
|
{
|
484 |
|
|
if (ss->next == gfc_ss_terminator)
|
485 |
|
|
ss->loop_chain = loop->ss;
|
486 |
|
|
else
|
487 |
|
|
ss->loop_chain = ss->next;
|
488 |
|
|
}
|
489 |
|
|
gcc_assert (ss == gfc_ss_terminator);
|
490 |
|
|
loop->ss = head;
|
491 |
|
|
}
|
492 |
|
|
|
493 |
|
|
|
494 |
|
|
/* Generate an initializer for a static pointer or allocatable array. */
|
495 |
|
|
|
496 |
|
|
void
|
497 |
|
|
gfc_trans_static_array_pointer (gfc_symbol * sym)
|
498 |
|
|
{
|
499 |
|
|
tree type;
|
500 |
|
|
|
501 |
|
|
gcc_assert (TREE_STATIC (sym->backend_decl));
|
502 |
|
|
/* Just zero the data member. */
|
503 |
|
|
type = TREE_TYPE (sym->backend_decl);
|
504 |
|
|
DECL_INITIAL (sym->backend_decl) = gfc_build_null_descriptor (type);
|
505 |
|
|
}
|
506 |
|
|
|
507 |
|
|
|
508 |
|
|
/* If the bounds of SE's loop have not yet been set, see if they can be
|
509 |
|
|
determined from array spec AS, which is the array spec of a called
|
510 |
|
|
function. MAPPING maps the callee's dummy arguments to the values
|
511 |
|
|
that the caller is passing. Add any initialization and finalization
|
512 |
|
|
code to SE. */
|
513 |
|
|
|
514 |
|
|
void
|
515 |
|
|
gfc_set_loop_bounds_from_array_spec (gfc_interface_mapping * mapping,
|
516 |
|
|
gfc_se * se, gfc_array_spec * as)
|
517 |
|
|
{
|
518 |
|
|
int n, dim;
|
519 |
|
|
gfc_se tmpse;
|
520 |
|
|
tree lower;
|
521 |
|
|
tree upper;
|
522 |
|
|
tree tmp;
|
523 |
|
|
|
524 |
|
|
if (as && as->type == AS_EXPLICIT)
|
525 |
|
|
for (dim = 0; dim < se->loop->dimen; dim++)
|
526 |
|
|
{
|
527 |
|
|
n = se->loop->order[dim];
|
528 |
|
|
if (se->loop->to[n] == NULL_TREE)
|
529 |
|
|
{
|
530 |
|
|
/* Evaluate the lower bound. */
|
531 |
|
|
gfc_init_se (&tmpse, NULL);
|
532 |
|
|
gfc_apply_interface_mapping (mapping, &tmpse, as->lower[dim]);
|
533 |
|
|
gfc_add_block_to_block (&se->pre, &tmpse.pre);
|
534 |
|
|
gfc_add_block_to_block (&se->post, &tmpse.post);
|
535 |
|
|
lower = fold_convert (gfc_array_index_type, tmpse.expr);
|
536 |
|
|
|
537 |
|
|
/* ...and the upper bound. */
|
538 |
|
|
gfc_init_se (&tmpse, NULL);
|
539 |
|
|
gfc_apply_interface_mapping (mapping, &tmpse, as->upper[dim]);
|
540 |
|
|
gfc_add_block_to_block (&se->pre, &tmpse.pre);
|
541 |
|
|
gfc_add_block_to_block (&se->post, &tmpse.post);
|
542 |
|
|
upper = fold_convert (gfc_array_index_type, tmpse.expr);
|
543 |
|
|
|
544 |
|
|
/* Set the upper bound of the loop to UPPER - LOWER. */
|
545 |
|
|
tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type, upper, lower);
|
546 |
|
|
tmp = gfc_evaluate_now (tmp, &se->pre);
|
547 |
|
|
se->loop->to[n] = tmp;
|
548 |
|
|
}
|
549 |
|
|
}
|
550 |
|
|
}
|
551 |
|
|
|
552 |
|
|
|
/* Generate code to allocate an array temporary, or create a variable to
   hold the data.  If size is NULL, zero the descriptor so that the
   callee will allocate the array.  If DEALLOC is true, also generate code to
   free the array afterwards.

   If INITIAL is not NULL, it is packed using internal_pack and the result
   used as data instead of allocating a fresh, uninitialized area of memory.

   Initialization code is added to PRE and finalization code to POST.
   DYNAMIC is true if the caller may want to extend the array later
   using realloc.  This prevents us from putting the array on the stack.  */

static void
gfc_trans_allocate_array_storage (stmtblock_t * pre, stmtblock_t * post,
                                  gfc_ss_info * info, tree size, tree nelem,
                                  tree initial, bool dynamic, bool dealloc)
{
  tree tmp;
  tree desc;
  bool onstack;

  desc = info->descriptor;
  info->offset = gfc_index_zero_node;
  if (size == NULL_TREE || integer_zerop (size))
    {
      /* A callee allocated array.  */
      gfc_conv_descriptor_data_set (pre, desc, null_pointer_node);
      onstack = FALSE;
    }
  else
    {
      /* Allocate the temporary.  */
      onstack = !dynamic && initial == NULL_TREE
                && gfc_can_put_var_on_stack (size);

      if (onstack)
        {
          /* Make a temporary variable to hold the data.  */
          tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (nelem), nelem,
                             gfc_index_one_node);
          tmp = build_range_type (gfc_array_index_type, gfc_index_zero_node,
                                  tmp);
          tmp = build_array_type (gfc_get_element_type (TREE_TYPE (desc)),
                                  tmp);
          tmp = gfc_create_var (tmp, "A");
          tmp = gfc_build_addr_expr (NULL_TREE, tmp);
          gfc_conv_descriptor_data_set (pre, desc, tmp);
        }
      else
        {
          /* Allocate memory to hold the data or call internal_pack.  */
          if (initial == NULL_TREE)
            {
              tmp = gfc_call_malloc (pre, NULL, size);
              tmp = gfc_evaluate_now (tmp, pre);
            }
          else
            {
              tree packed;
              tree source_data;
              tree was_packed;
              stmtblock_t do_copying;

              tmp = TREE_TYPE (initial); /* Pointer to descriptor.  */
              gcc_assert (TREE_CODE (tmp) == POINTER_TYPE);
              tmp = TREE_TYPE (tmp); /* The descriptor itself.  */
              tmp = gfc_get_element_type (tmp);
              gcc_assert (tmp == gfc_get_element_type (TREE_TYPE (desc)));
              packed = gfc_create_var (build_pointer_type (tmp), "data");

              tmp = build_call_expr_loc (input_location,
                                         gfor_fndecl_in_pack, 1, initial);
              tmp = fold_convert (TREE_TYPE (packed), tmp);
              gfc_add_modify (pre, packed, tmp);

              tmp = build_fold_indirect_ref_loc (input_location,
                                                 initial);
              source_data = gfc_conv_descriptor_data_get (tmp);

              /* internal_pack may return source->data without any allocation
                 or copying if it is already packed.  If that's the case, we
                 need to allocate and copy manually.  */

              gfc_start_block (&do_copying);
              tmp = gfc_call_malloc (&do_copying, NULL, size);
              tmp = fold_convert (TREE_TYPE (packed), tmp);
              gfc_add_modify (&do_copying, packed, tmp);
              tmp = gfc_build_memcpy_call (packed, source_data, size);
              gfc_add_expr_to_block (&do_copying, tmp);

              was_packed = fold_build2 (EQ_EXPR, boolean_type_node,
                                        packed, source_data);
              tmp = gfc_finish_block (&do_copying);
              tmp = build3_v (COND_EXPR, was_packed, tmp,
                              build_empty_stmt (input_location));
              gfc_add_expr_to_block (pre, tmp);

              tmp = fold_convert (pvoid_type_node, packed);
            }

          gfc_conv_descriptor_data_set (pre, desc, tmp);
        }
    }
  info->data = gfc_conv_descriptor_data_get (desc);

  /* The offset is zero because we create temporaries with a zero
     lower bound.  */
  gfc_conv_descriptor_offset_set (pre, desc, gfc_index_zero_node);

  if (dealloc && !onstack)
    {
      /* Free the temporary.  */
      tmp = gfc_conv_descriptor_data_get (desc);
      tmp = gfc_call_free (fold_convert (pvoid_type_node, tmp));
      gfc_add_expr_to_block (post, tmp);
    }
}

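/* Summary sketch (added commentary, not original code): the storage choice
   made above can be read as

     if (size == NULL || size == 0)   -> callee allocates; data set to NULL
     else if (!dynamic && !initial
              && gfc_can_put_var_on_stack (size))
                                      -> stack-allocated block "A"
     else if (initial == NULL)        -> heap storage via gfc_call_malloc
     else                             -> internal_pack (INITIAL), with a
                                         manual malloc+memcpy when the source
                                         was already packed

   and in every non-stack case DEALLOC adds a matching free to POST.  */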
/* Generate code to create and initialize the descriptor for a temporary
   array.  This is used for both temporaries needed by the scalarizer, and
   functions returning arrays.  Adjusts the loop variables to be
   zero-based, and calculates the loop bounds for callee allocated arrays.
   Allocate the array unless it's callee allocated (we have a callee
   allocated array if 'callee_alloc' is true, or if loop->to[n] is
   NULL_TREE for any n).  Also fills in the descriptor, data and offset
   fields of info if known.  Returns the size of the array, or NULL for a
   callee allocated array.

   PRE, POST, INITIAL, DYNAMIC and DEALLOC are as for
   gfc_trans_allocate_array_storage.  */

tree
gfc_trans_create_temp_array (stmtblock_t * pre, stmtblock_t * post,
                             gfc_loopinfo * loop, gfc_ss_info * info,
                             tree eltype, tree initial, bool dynamic,
                             bool dealloc, bool callee_alloc, locus * where)
{
  tree type;
  tree desc;
  tree tmp;
  tree size;
  tree nelem;
  tree cond;
  tree or_expr;
  int n;
  int dim;

  gcc_assert (info->dimen > 0);

  if (gfc_option.warn_array_temp && where)
    gfc_warning ("Creating array temporary at %L", where);

  /* Set the lower bound to zero.  */
  for (dim = 0; dim < info->dimen; dim++)
    {
      n = loop->order[dim];
      /* Callee allocated arrays may not have a known bound yet.  */
      if (loop->to[n])
        loop->to[n] = gfc_evaluate_now (fold_build2 (MINUS_EXPR,
                                        gfc_array_index_type,
                                        loop->to[n], loop->from[n]), pre);
      loop->from[n] = gfc_index_zero_node;

      info->delta[dim] = gfc_index_zero_node;
      info->start[dim] = gfc_index_zero_node;
      info->end[dim] = gfc_index_zero_node;
      info->stride[dim] = gfc_index_one_node;
      info->dim[dim] = dim;
    }

  /* Initialize the descriptor.  */
  type =
    gfc_get_array_type_bounds (eltype, info->dimen, loop->from, loop->to, 1,
                               GFC_ARRAY_UNKNOWN, true);
  desc = gfc_create_var (type, "atmp");
  GFC_DECL_PACKED_ARRAY (desc) = 1;

  info->descriptor = desc;
  size = gfc_index_one_node;

  /* Fill in the array dtype.  */
  tmp = gfc_conv_descriptor_dtype (desc);
  gfc_add_modify (pre, tmp, gfc_get_dtype (TREE_TYPE (desc)));

  /*
     Fill in the bounds and stride.  This is a packed array, so:

     size = 1;
     for (n = 0; n < rank; n++)
       {
         stride[n] = size;
         delta = ubound[n] + 1 - lbound[n];
         size = size * delta;
       }
     size = size * sizeof(element);
  */

  or_expr = NULL_TREE;

  /* If there is at least one null loop->to[n], it is a callee allocated
     array.  */
  for (n = 0; n < info->dimen; n++)
    if (loop->to[n] == NULL_TREE)
      {
        size = NULL_TREE;
        break;
      }

  for (n = 0; n < info->dimen; n++)
    {
      if (size == NULL_TREE)
        {
          /* For a callee allocated array express the loop bounds in terms
             of the descriptor fields.  */
          tmp =
            fold_build2 (MINUS_EXPR, gfc_array_index_type,
                         gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[n]),
                         gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[n]));
          loop->to[n] = tmp;
          continue;
        }

      /* Store the stride and bound components in the descriptor.  */
      gfc_conv_descriptor_stride_set (pre, desc, gfc_rank_cst[n], size);

      gfc_conv_descriptor_lbound_set (pre, desc, gfc_rank_cst[n],
                                      gfc_index_zero_node);

      gfc_conv_descriptor_ubound_set (pre, desc, gfc_rank_cst[n], loop->to[n]);

      tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
                         loop->to[n], gfc_index_one_node);

      /* Check whether the size for this dimension is negative.  */
      cond = fold_build2 (LE_EXPR, boolean_type_node, tmp,
                          gfc_index_zero_node);
      cond = gfc_evaluate_now (cond, pre);

      if (n == 0)
        or_expr = cond;
      else
        or_expr = fold_build2 (TRUTH_OR_EXPR, boolean_type_node, or_expr, cond);

      size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
      size = gfc_evaluate_now (size, pre);
    }

  /* Get the size of the array.  */

  if (size && !callee_alloc)
    {
      /* If or_expr is true, then the extent in at least one
         dimension is zero and the size is set to zero.  */
      size = fold_build3 (COND_EXPR, gfc_array_index_type,
                          or_expr, gfc_index_zero_node, size);

      nelem = size;
      size = fold_build2 (MULT_EXPR, gfc_array_index_type, size,
                fold_convert (gfc_array_index_type,
                              TYPE_SIZE_UNIT (gfc_get_element_type (type))));
    }
  else
    {
      nelem = size;
      size = NULL_TREE;
    }

  gfc_trans_allocate_array_storage (pre, post, info, size, nelem, initial,
                                    dynamic, dealloc);

  if (info->dimen > loop->temp_dim)
    loop->temp_dim = info->dimen;

  return size;
}

/* Generate code to transpose array EXPR by creating a new descriptor
   in which the dimension specifications have been reversed.  */

void
gfc_conv_array_transpose (gfc_se * se, gfc_expr * expr)
{
  tree dest, src, dest_index, src_index;
  gfc_loopinfo *loop;
  gfc_ss_info *dest_info;
  gfc_ss *dest_ss, *src_ss;
  gfc_se src_se;
  int n;

  loop = se->loop;

  src_ss = gfc_walk_expr (expr);
  dest_ss = se->ss;

  dest_info = &dest_ss->data.info;
  gcc_assert (dest_info->dimen == 2);

  /* Get a descriptor for EXPR.  */
  gfc_init_se (&src_se, NULL);
  gfc_conv_expr_descriptor (&src_se, expr, src_ss);
  gfc_add_block_to_block (&se->pre, &src_se.pre);
  gfc_add_block_to_block (&se->post, &src_se.post);
  src = src_se.expr;

  /* Allocate a new descriptor for the return value.  */
  dest = gfc_create_var (TREE_TYPE (src), "atmp");
  dest_info->descriptor = dest;
  se->expr = dest;

  /* Copy across the dtype field.  */
  gfc_add_modify (&se->pre,
                  gfc_conv_descriptor_dtype (dest),
                  gfc_conv_descriptor_dtype (src));

  /* Copy the dimension information, renumbering dimension 1 to 0 and
     0 to 1.  */
  for (n = 0; n < 2; n++)
    {
      dest_info->delta[n] = gfc_index_zero_node;
      dest_info->start[n] = gfc_index_zero_node;
      dest_info->end[n] = gfc_index_zero_node;
      dest_info->stride[n] = gfc_index_one_node;
      dest_info->dim[n] = n;

      dest_index = gfc_rank_cst[n];
      src_index = gfc_rank_cst[1 - n];

      gfc_conv_descriptor_stride_set (&se->pre, dest, dest_index,
                           gfc_conv_descriptor_stride_get (src, src_index));

      gfc_conv_descriptor_lbound_set (&se->pre, dest, dest_index,
                           gfc_conv_descriptor_lbound_get (src, src_index));

      gfc_conv_descriptor_ubound_set (&se->pre, dest, dest_index,
                           gfc_conv_descriptor_ubound_get (src, src_index));

      if (!loop->to[n])
        {
          gcc_assert (integer_zerop (loop->from[n]));
          loop->to[n] =
            fold_build2 (MINUS_EXPR, gfc_array_index_type,
                         gfc_conv_descriptor_ubound_get (dest, dest_index),
                         gfc_conv_descriptor_lbound_get (dest, dest_index));
        }
    }

  /* Copy the data pointer.  */
  dest_info->data = gfc_conv_descriptor_data_get (src);
  gfc_conv_descriptor_data_set (&se->pre, dest, dest_info->data);

  /* Copy the offset.  This is not changed by transposition; the top-left
     element is still at the same offset as before, except where the loop
     starts at zero.  */
  if (!integer_zerop (loop->from[0]))
    dest_info->offset = gfc_conv_descriptor_offset_get (src);
  else
    dest_info->offset = gfc_index_zero_node;

  gfc_conv_descriptor_offset_set (&se->pre, dest,
                                  dest_info->offset);

  if (dest_info->dimen > loop->temp_dim)
    loop->temp_dim = dest_info->dimen;
}

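/* Worked example (added commentary): for a source descriptor describing a
   3x2 section with strides (1,3), lbounds (1,1) and ubounds (3,2), the code
   above builds a destination descriptor with strides (3,1), lbounds (1,1)
   and ubounds (2,3) -- dimension n of DEST simply takes over dimension 1-n
   of SRC, while the data pointer is reused and the offset is either copied
   or zeroed depending on the loop lower bound.  */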
/* Return the number of iterations in a loop that starts at START,
   ends at END, and has step STEP.  */

static tree
gfc_get_iteration_count (tree start, tree end, tree step)
{
  tree tmp;
  tree type;

  type = TREE_TYPE (step);
  tmp = fold_build2 (MINUS_EXPR, type, end, start);
  tmp = fold_build2 (FLOOR_DIV_EXPR, type, tmp, step);
  tmp = fold_build2 (PLUS_EXPR, type, tmp, build_int_cst (type, 1));
  tmp = fold_build2 (MAX_EXPR, type, tmp, build_int_cst (type, 0));
  return fold_convert (gfc_array_index_type, tmp);
}

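/* Worked example (added commentary): the count computed above is
   max (floor ((END - START) / STEP) + 1, 0), so START=1, END=10, STEP=3
   gives floor (9/3) + 1 = 4 iterations (1, 4, 7, 10), while START=5, END=1,
   STEP=1 gives max (-4 + 1, 0) = 0.  */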
/* Extend the data in array DESC by EXTRA elements.  */

static void
gfc_grow_array (stmtblock_t * pblock, tree desc, tree extra)
{
  tree arg0, arg1;
  tree tmp;
  tree size;
  tree ubound;

  if (integer_zerop (extra))
    return;

  ubound = gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[0]);

  /* Add EXTRA to the upper bound.  */
  tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type, ubound, extra);
  gfc_conv_descriptor_ubound_set (pblock, desc, gfc_rank_cst[0], tmp);

  /* Get the value of the current data pointer.  */
  arg0 = gfc_conv_descriptor_data_get (desc);

  /* Calculate the new array size.  */
  size = TYPE_SIZE_UNIT (gfc_get_element_type (TREE_TYPE (desc)));
  tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
                     ubound, gfc_index_one_node);
  arg1 = fold_build2 (MULT_EXPR, size_type_node,
                      fold_convert (size_type_node, tmp),
                      fold_convert (size_type_node, size));

  /* Call the realloc() function.  */
  tmp = gfc_call_realloc (pblock, arg0, arg1);
  gfc_conv_descriptor_data_set (pblock, desc, tmp);
}


/* Return true if the bounds of iterator I can only be determined
   at run time.  */

static inline bool
gfc_iterator_has_dynamic_bounds (gfc_iterator * i)
{
  return (i->start->expr_type != EXPR_CONSTANT
          || i->end->expr_type != EXPR_CONSTANT
          || i->step->expr_type != EXPR_CONSTANT);
}


/* Split the size of constructor element EXPR into the sum of two terms,
   one of which can be determined at compile time and one of which must
   be calculated at run time.  Set *SIZE to the former and return true
   if the latter might be nonzero.  */

static bool
gfc_get_array_constructor_element_size (mpz_t * size, gfc_expr * expr)
{
  if (expr->expr_type == EXPR_ARRAY)
    return gfc_get_array_constructor_size (size, expr->value.constructor);
  else if (expr->rank > 0)
    {
      /* Calculate everything at run time.  */
      mpz_set_ui (*size, 0);
      return true;
    }
  else
    {
      /* A single element.  */
      mpz_set_ui (*size, 1);
      return false;
    }
}


/* Like gfc_get_array_constructor_element_size, but applied to the whole
   of array constructor C.  */

static bool
gfc_get_array_constructor_size (mpz_t * size, gfc_constructor * c)
{
  gfc_iterator *i;
  mpz_t val;
  mpz_t len;
  bool dynamic;

  mpz_set_ui (*size, 0);
  mpz_init (len);
  mpz_init (val);

  dynamic = false;
  for (; c; c = c->next)
    {
      i = c->iterator;
      if (i && gfc_iterator_has_dynamic_bounds (i))
        dynamic = true;
      else
        {
          dynamic |= gfc_get_array_constructor_element_size (&len, c->expr);
          if (i)
            {
              /* Multiply the static part of the element size by the
                 number of iterations.  */
              mpz_sub (val, i->end->value.integer, i->start->value.integer);
              mpz_fdiv_q (val, val, i->step->value.integer);
              mpz_add_ui (val, val, 1);
              if (mpz_sgn (val) > 0)
                mpz_mul (len, len, val);
              else
                mpz_set_ui (len, 0);
            }
          mpz_add (*size, *size, len);
        }
    }
  mpz_clear (len);
  mpz_clear (val);
  return dynamic;
}

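/* Worked example (added commentary): for the constructor
   (/ 1, 2, (i, i = 1, 5), (j, j = 1, n) /) the two scalar values and the
   constant-bound implied-do contribute 2 + 5 = 7 to *SIZE, while the
   implied-do with the run-time bound N makes the function return true, so
   the caller must be prepared to extend the array with gfc_grow_array.  */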
/* Make sure offset is a variable.  */

static void
gfc_put_offset_into_var (stmtblock_t * pblock, tree * poffset,
                         tree * offsetvar)
{
  /* We should have already created the offset variable.  We cannot
     create it here because we may be in an inner scope.  */
  gcc_assert (*offsetvar != NULL_TREE);
  gfc_add_modify (pblock, *offsetvar, *poffset);
  *poffset = *offsetvar;
  TREE_USED (*offsetvar) = 1;
}


/* Variables needed for bounds-checking.  */
static bool first_len;
static tree first_len_val;
static bool typespec_chararray_ctor;

static void
gfc_trans_array_ctor_element (stmtblock_t * pblock, tree desc,
                              tree offset, gfc_se * se, gfc_expr * expr)
{
  tree tmp;

  gfc_conv_expr (se, expr);

  /* Store the value.  */
  tmp = build_fold_indirect_ref_loc (input_location,
                                     gfc_conv_descriptor_data_get (desc));
  tmp = gfc_build_array_ref (tmp, offset, NULL);

  if (expr->ts.type == BT_CHARACTER)
    {
      int i = gfc_validate_kind (BT_CHARACTER, expr->ts.kind, false);
      tree esize;

      esize = size_in_bytes (gfc_get_element_type (TREE_TYPE (desc)));
      esize = fold_convert (gfc_charlen_type_node, esize);
      esize = fold_build2 (TRUNC_DIV_EXPR, gfc_charlen_type_node, esize,
                           build_int_cst (gfc_charlen_type_node,
                                          gfc_character_kinds[i].bit_size / 8));

      gfc_conv_string_parameter (se);
      if (POINTER_TYPE_P (TREE_TYPE (tmp)))
        {
          /* The temporary is an array of pointers.  */
          se->expr = fold_convert (TREE_TYPE (tmp), se->expr);
          gfc_add_modify (&se->pre, tmp, se->expr);
        }
      else
        {
          /* The temporary is an array of string values.  */
          tmp = gfc_build_addr_expr (gfc_get_pchar_type (expr->ts.kind), tmp);
          /* We know the temporary and the value will be the same length,
             so can use memcpy.  */
          gfc_trans_string_copy (&se->pre, esize, tmp, expr->ts.kind,
                                 se->string_length, se->expr, expr->ts.kind);
        }
      if ((gfc_option.rtcheck & GFC_RTCHECK_BOUNDS) && !typespec_chararray_ctor)
        {
          if (first_len)
            {
              gfc_add_modify (&se->pre, first_len_val,
                              se->string_length);
              first_len = false;
            }
          else
            {
              /* Verify that all constructor elements are of the same
                 length.  */
              tree cond = fold_build2 (NE_EXPR, boolean_type_node,
                                       first_len_val, se->string_length);
              gfc_trans_runtime_check
                (true, false, cond, &se->pre, &expr->where,
                 "Different CHARACTER lengths (%ld/%ld) in array constructor",
                 fold_convert (long_integer_type_node, first_len_val),
                 fold_convert (long_integer_type_node, se->string_length));
            }
        }
    }
  else
    {
      /* TODO: Should the frontend already have done this conversion?  */
      se->expr = fold_convert (TREE_TYPE (tmp), se->expr);
      gfc_add_modify (&se->pre, tmp, se->expr);
    }

  gfc_add_block_to_block (pblock, &se->pre);
  gfc_add_block_to_block (pblock, &se->post);
}

/* Add the contents of an array to the constructor.  DYNAMIC is as for
   gfc_trans_array_constructor_value.  */

static void
gfc_trans_array_constructor_subarray (stmtblock_t * pblock,
                                      tree type ATTRIBUTE_UNUSED,
                                      tree desc, gfc_expr * expr,
                                      tree * poffset, tree * offsetvar,
                                      bool dynamic)
{
  gfc_se se;
  gfc_ss *ss;
  gfc_loopinfo loop;
  stmtblock_t body;
  tree tmp;
  tree size;
  int n;

  /* We need this to be a variable so we can increment it.  */
  gfc_put_offset_into_var (pblock, poffset, offsetvar);

  gfc_init_se (&se, NULL);

  /* Walk the array expression.  */
  ss = gfc_walk_expr (expr);
  gcc_assert (ss != gfc_ss_terminator);

  /* Initialize the scalarizer.  */
  gfc_init_loopinfo (&loop);
  gfc_add_ss_to_loop (&loop, ss);

  /* Initialize the loop.  */
  gfc_conv_ss_startstride (&loop);
  gfc_conv_loop_setup (&loop, &expr->where);

  /* Make sure the constructed array has room for the new data.  */
  if (dynamic)
    {
      /* Set SIZE to the total number of elements in the subarray.  */
      size = gfc_index_one_node;
      for (n = 0; n < loop.dimen; n++)
        {
          tmp = gfc_get_iteration_count (loop.from[n], loop.to[n],
                                         gfc_index_one_node);
          size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
        }

      /* Grow the constructed array by SIZE elements.  */
      gfc_grow_array (&loop.pre, desc, size);
    }

  /* Make the loop body.  */
  gfc_mark_ss_chain_used (ss, 1);
  gfc_start_scalarized_body (&loop, &body);
  gfc_copy_loopinfo_to_se (&se, &loop);
  se.ss = ss;

  gfc_trans_array_ctor_element (&body, desc, *poffset, &se, expr);
  gcc_assert (se.ss == gfc_ss_terminator);

  /* Increment the offset.  */
  tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
                     *poffset, gfc_index_one_node);
  gfc_add_modify (&body, *poffset, tmp);

  /* Finish the loop.  */
  gfc_trans_scalarizing_loops (&loop, &body);
  gfc_add_block_to_block (&loop.pre, &loop.post);
  tmp = gfc_finish_block (&loop.pre);
  gfc_add_expr_to_block (pblock, tmp);

  gfc_cleanup_loop (&loop);
}

1226 |
|
|
|
1227 |
|
|
/* Assign the values to the elements of an array constructor. DYNAMIC
|
1228 |
|
|
is true if descriptor DESC only contains enough data for the static
|
1229 |
|
|
size calculated by gfc_get_array_constructor_size. When true, memory
|
1230 |
|
|
for the dynamic parts must be allocated using realloc. */
|
1231 |
|
|
|
1232 |
|
|
static void
|
1233 |
|
|
gfc_trans_array_constructor_value (stmtblock_t * pblock, tree type,
|
1234 |
|
|
tree desc, gfc_constructor * c,
|
1235 |
|
|
tree * poffset, tree * offsetvar,
|
1236 |
|
|
bool dynamic)
|
1237 |
|
|
{
|
1238 |
|
|
tree tmp;
|
1239 |
|
|
stmtblock_t body;
|
1240 |
|
|
gfc_se se;
|
1241 |
|
|
mpz_t size;
|
1242 |
|
|
|
1243 |
|
|
tree shadow_loopvar = NULL_TREE;
|
1244 |
|
|
gfc_saved_var saved_loopvar;
|
1245 |
|
|
|
1246 |
|
|
mpz_init (size);
|
1247 |
|
|
for (; c; c = c->next)
|
1248 |
|
|
{
|
1249 |
|
|
/* If this is an iterator or an array, the offset must be a variable. */
|
1250 |
|
|
if ((c->iterator || c->expr->rank > 0) && INTEGER_CST_P (*poffset))
|
1251 |
|
|
gfc_put_offset_into_var (pblock, poffset, offsetvar);
|
1252 |
|
|
|
1253 |
|
|
/* Shadowing the iterator avoids changing its value and saves us from
|
1254 |
|
|
keeping track of it. Further, it makes sure that there's always a
|
1255 |
|
|
backend-decl for the symbol, even if there wasn't one before,
|
1256 |
|
|
e.g. in the case of an iterator that appears in a specification
|
1257 |
|
|
expression in an interface mapping. */
|
1258 |
|
|
if (c->iterator)
|
1259 |
|
|
{
|
1260 |
|
|
gfc_symbol *sym = c->iterator->var->symtree->n.sym;
|
1261 |
|
|
tree type = gfc_typenode_for_spec (&sym->ts);
|
1262 |
|
|
|
1263 |
|
|
shadow_loopvar = gfc_create_var (type, "shadow_loopvar");
|
1264 |
|
|
gfc_shadow_sym (sym, shadow_loopvar, &saved_loopvar);
|
1265 |
|
|
}
|
1266 |
|
|
|
1267 |
|
|
gfc_start_block (&body);
|
1268 |
|
|
|
1269 |
|
|
if (c->expr->expr_type == EXPR_ARRAY)
|
1270 |
|
|
{
|
1271 |
|
|
/* Array constructors can be nested. */
|
1272 |
|
|
gfc_trans_array_constructor_value (&body, type, desc,
|
1273 |
|
|
c->expr->value.constructor,
|
1274 |
|
|
poffset, offsetvar, dynamic);
|
1275 |
|
|
}
|
1276 |
|
|
else if (c->expr->rank > 0)
|
1277 |
|
|
{
|
1278 |
|
|
gfc_trans_array_constructor_subarray (&body, type, desc, c->expr,
|
1279 |
|
|
poffset, offsetvar, dynamic);
|
1280 |
|
|
}
|
1281 |
|
|
else
|
1282 |
|
|
{
|
1283 |
|
|
/* This code really upsets the gimplifier so don't bother for now. */
|
1284 |
|
|
gfc_constructor *p;
|
1285 |
|
|
HOST_WIDE_INT n;
|
1286 |
|
|
HOST_WIDE_INT size;
|
1287 |
|
|
|
1288 |
|
|
p = c;
|
1289 |
|
|
n = 0;
|
1290 |
|
|
while (p && !(p->iterator || p->expr->expr_type != EXPR_CONSTANT))
|
1291 |
|
|
{
|
1292 |
|
|
p = p->next;
|
1293 |
|
|
n++;
|
1294 |
|
|
}
|
1295 |
|
|
if (n < 4)
|
1296 |
|
|
{
|
1297 |
|
|
/* Scalar values. */
|
1298 |
|
|
gfc_init_se (&se, NULL);
|
1299 |
|
|
gfc_trans_array_ctor_element (&body, desc, *poffset,
|
1300 |
|
|
&se, c->expr);
|
1301 |
|
|
|
1302 |
|
|
*poffset = fold_build2 (PLUS_EXPR, gfc_array_index_type,
|
1303 |
|
|
*poffset, gfc_index_one_node);
|
1304 |
|
|
}
|
1305 |
|
|
else
|
1306 |
|
|
{
|
1307 |
|
|
/* Collect multiple scalar constants into a constructor. */
|
1308 |
|
|
tree list;
|
1309 |
|
|
tree init;
|
1310 |
|
|
tree bound;
|
1311 |
|
|
tree tmptype;
|
1312 |
|
|
HOST_WIDE_INT idx = 0;
|
1313 |
|
|
|
1314 |
|
|
p = c;
|
1315 |
|
|
list = NULL_TREE;
|
1316 |
|
|
/* Count the number of consecutive scalar constants. */
|
1317 |
|
|
while (p && !(p->iterator
|
1318 |
|
|
|| p->expr->expr_type != EXPR_CONSTANT))
|
1319 |
|
|
{
|
1320 |
|
|
gfc_init_se (&se, NULL);
|
1321 |
|
|
gfc_conv_constant (&se, p->expr);
|
1322 |
|
|
|
1323 |
|
|
if (c->expr->ts.type != BT_CHARACTER)
|
1324 |
|
|
se.expr = fold_convert (type, se.expr);
|
1325 |
|
|
/* For constant character array constructors we build
|
1326 |
|
|
an array of pointers. */
|
1327 |
|
|
else if (POINTER_TYPE_P (type))
|
1328 |
|
|
se.expr = gfc_build_addr_expr
|
1329 |
|
|
(gfc_get_pchar_type (p->expr->ts.kind),
|
1330 |
|
|
se.expr);
|
1331 |
|
|
|
1332 |
|
|
list = tree_cons (build_int_cst (gfc_array_index_type,
|
1333 |
|
|
idx++), se.expr, list);
|
1334 |
|
|
c = p;
|
1335 |
|
|
p = p->next;
|
1336 |
|
|
}
|
1337 |
|
|
|
1338 |
|
|
bound = build_int_cst (NULL_TREE, n - 1);
|
1339 |
|
|
/* Create an array type to hold them. */
|
1340 |
|
|
tmptype = build_range_type (gfc_array_index_type,
|
1341 |
|
|
gfc_index_zero_node, bound);
|
1342 |
|
|
tmptype = build_array_type (type, tmptype);
|
1343 |
|
|
|
1344 |
|
|
init = build_constructor_from_list (tmptype, nreverse (list));
|
1345 |
|
|
TREE_CONSTANT (init) = 1;
|
1346 |
|
|
TREE_STATIC (init) = 1;
|
1347 |
|
|
/* Create a static variable to hold the data. */
|
1348 |
|
|
tmp = gfc_create_var (tmptype, "data");
|
1349 |
|
|
TREE_STATIC (tmp) = 1;
|
1350 |
|
|
TREE_CONSTANT (tmp) = 1;
|
1351 |
|
|
TREE_READONLY (tmp) = 1;
|
1352 |
|
|
DECL_INITIAL (tmp) = init;
|
1353 |
|
|
init = tmp;
|
1354 |
|
|
|
1355 |
|
|
/* Use BUILTIN_MEMCPY to assign the values. */
|
1356 |
|
|
tmp = gfc_conv_descriptor_data_get (desc);
|
1357 |
|
|
tmp = build_fold_indirect_ref_loc (input_location,
|
1358 |
|
|
tmp);
|
1359 |
|
|
tmp = gfc_build_array_ref (tmp, *poffset, NULL);
|
1360 |
|
|
tmp = gfc_build_addr_expr (NULL_TREE, tmp);
|
1361 |
|
|
init = gfc_build_addr_expr (NULL_TREE, init);
|
1362 |
|
|
|
1363 |
|
|
size = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (type));
|
1364 |
|
|
bound = build_int_cst (NULL_TREE, n * size);
|
1365 |
|
|
tmp = build_call_expr_loc (input_location,
|
1366 |
|
|
built_in_decls[BUILT_IN_MEMCPY], 3,
|
1367 |
|
|
tmp, init, bound);
|
1368 |
|
|
gfc_add_expr_to_block (&body, tmp);
|
1369 |
|
|
|
1370 |
|
|
*poffset = fold_build2 (PLUS_EXPR, gfc_array_index_type,
|
1371 |
|
|
*poffset,
|
1372 |
|
|
build_int_cst (gfc_array_index_type, n));
|
1373 |
|
|
}
|
1374 |
|
|
if (!INTEGER_CST_P (*poffset))
|
1375 |
|
|
{
|
1376 |
|
|
gfc_add_modify (&body, *offsetvar, *poffset);
|
1377 |
|
|
*poffset = *offsetvar;
|
1378 |
|
|
}
|
1379 |
|
|
}
|
1380 |
|
|
|
1381 |
|
|
/* The frontend should already have done any expansions
|
1382 |
|
|
at compile-time. */
|
1383 |
|
|
if (!c->iterator)
|
1384 |
|
|
{
|
1385 |
|
|
/* Pass the code as is. */
|
1386 |
|
|
tmp = gfc_finish_block (&body);
|
1387 |
|
|
gfc_add_expr_to_block (pblock, tmp);
|
1388 |
|
|
}
|
1389 |
|
|
else
|
1390 |
|
|
{
|
1391 |
|
|
/* Build the implied do-loop. */
|
1392 |
|
|
stmtblock_t implied_do_block;
|
1393 |
|
|
tree cond;
|
1394 |
|
|
tree end;
|
1395 |
|
|
tree step;
|
1396 |
|
|
tree exit_label;
|
1397 |
|
|
tree loopbody;
|
1398 |
|
|
tree tmp2;
|
1399 |
|
|
|
1400 |
|
|
loopbody = gfc_finish_block (&body);
|
1401 |
|
|
|
1402 |
|
|
/* Create a new block that holds the implied-do loop. A temporary
|
1403 |
|
|
loop-variable is used. */
|
1404 |
|
|
gfc_start_block(&implied_do_block);
|
1405 |
|
|
|
1406 |
|
|
/* Initialize the loop. */
|
1407 |
|
|
gfc_init_se (&se, NULL);
|
1408 |
|
|
gfc_conv_expr_val (&se, c->iterator->start);
|
1409 |
|
|
gfc_add_block_to_block (&implied_do_block, &se.pre);
|
1410 |
|
|
gfc_add_modify (&implied_do_block, shadow_loopvar, se.expr);
|
1411 |
|
|
|
1412 |
|
|
gfc_init_se (&se, NULL);
|
1413 |
|
|
gfc_conv_expr_val (&se, c->iterator->end);
|
1414 |
|
|
gfc_add_block_to_block (&implied_do_block, &se.pre);
|
1415 |
|
|
end = gfc_evaluate_now (se.expr, &implied_do_block);
|
1416 |
|
|
|
1417 |
|
|
gfc_init_se (&se, NULL);
|
1418 |
|
|
gfc_conv_expr_val (&se, c->iterator->step);
|
1419 |
|
|
gfc_add_block_to_block (&implied_do_block, &se.pre);
|
1420 |
|
|
step = gfc_evaluate_now (se.expr, &implied_do_block);
|
1421 |
|
|
|
1422 |
|
|
/* If this array expands dynamically, and the number of iterations
|
1423 |
|
|
is not constant, we won't have allocated space for the static
|
1424 |
|
|
part of C->EXPR's size. Do that now. */
|
1425 |
|
|
if (dynamic && gfc_iterator_has_dynamic_bounds (c->iterator))
|
1426 |
|
|
{
|
1427 |
|
|
/* Get the number of iterations. */
|
1428 |
|
|
tmp = gfc_get_iteration_count (shadow_loopvar, end, step);
|
1429 |
|
|
|
1430 |
|
|
/* Get the static part of C->EXPR's size. */
|
1431 |
|
|
gfc_get_array_constructor_element_size (&size, c->expr);
|
1432 |
|
|
tmp2 = gfc_conv_mpz_to_tree (size, gfc_index_integer_kind);
|
1433 |
|
|
|
1434 |
|
|
/* Grow the array by TMP * TMP2 elements. */
|
1435 |
|
|
tmp = fold_build2 (MULT_EXPR, gfc_array_index_type, tmp, tmp2);
|
1436 |
|
|
gfc_grow_array (&implied_do_block, desc, tmp);
|
1437 |
|
|
}
|
1438 |
|
|
|
1439 |
|
|
/* Generate the loop body. */
|
1440 |
|
|
exit_label = gfc_build_label_decl (NULL_TREE);
|
1441 |
|
|
gfc_start_block (&body);
|
1442 |
|
|
|
1443 |
|
|
/* Generate the exit condition. Depending on the sign of
|
1444 |
|
|
the step variable we have to generate the correct
|
1445 |
|
|
comparison. */
|
1446 |
|
|
tmp = fold_build2 (GT_EXPR, boolean_type_node, step,
|
1447 |
|
|
build_int_cst (TREE_TYPE (step), 0));
|
1448 |
|
|
cond = fold_build3 (COND_EXPR, boolean_type_node, tmp,
|
1449 |
|
|
fold_build2 (GT_EXPR, boolean_type_node,
|
1450 |
|
|
shadow_loopvar, end),
|
1451 |
|
|
fold_build2 (LT_EXPR, boolean_type_node,
|
1452 |
|
|
shadow_loopvar, end));
|
1453 |
|
|
tmp = build1_v (GOTO_EXPR, exit_label);
|
1454 |
|
|
TREE_USED (exit_label) = 1;
|
1455 |
|
|
tmp = build3_v (COND_EXPR, cond, tmp,
|
1456 |
|
|
build_empty_stmt (input_location));
|
1457 |
|
|
gfc_add_expr_to_block (&body, tmp);
|
1458 |
|
|
|
1459 |
|
|
/* The main loop body. */
|
1460 |
|
|
gfc_add_expr_to_block (&body, loopbody);
|
1461 |
|
|
|
1462 |
|
|
/* Increase loop variable by step. */
|
1463 |
|
|
tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (shadow_loopvar), shadow_loopvar, step);
|
1464 |
|
|
gfc_add_modify (&body, shadow_loopvar, tmp);
|
1465 |
|
|
|
1466 |
|
|
/* Finish the loop. */
|
1467 |
|
|
tmp = gfc_finish_block (&body);
|
1468 |
|
|
tmp = build1_v (LOOP_EXPR, tmp);
|
1469 |
|
|
gfc_add_expr_to_block (&implied_do_block, tmp);
|
1470 |
|
|
|
1471 |
|
|
/* Add the exit label. */
|
1472 |
|
|
tmp = build1_v (LABEL_EXPR, exit_label);
|
1473 |
|
|
gfc_add_expr_to_block (&implied_do_block, tmp);
|
1474 |
|
|
|
1475 |
|
|
/* Finish the implied-do loop. */
|
1476 |
|
|
tmp = gfc_finish_block(&implied_do_block);
|
1477 |
|
|
gfc_add_expr_to_block(pblock, tmp);
|
1478 |
|
|
|
1479 |
|
|
gfc_restore_sym (c->iterator->var->symtree->n.sym, &saved_loopvar);
|
1480 |
|
|
}
|
1481 |
|
|
}
|
1482 |
|
|
mpz_clear (size);
|
1483 |
|
|
}
|
1484 |
|
|
|
1485 |
|
|
|
1486 |
|
|
/* Figure out the string length of a variable reference expression.
|
1487 |
|
|
Used by get_array_ctor_strlen. */
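/* For a substring reference with constant bounds the length computed below
   is end - start + 1; for instance (illustrative only) a reference like
   str_array(:)(2:5) gives 5 - 2 + 1 = 4.  */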
|
1488 |
|
|
|
1489 |
|
|
static void
|
1490 |
|
|
get_array_ctor_var_strlen (gfc_expr * expr, tree * len)
|
1491 |
|
|
{
|
1492 |
|
|
gfc_ref *ref;
|
1493 |
|
|
gfc_typespec *ts;
|
1494 |
|
|
mpz_t char_len;
|
1495 |
|
|
|
1496 |
|
|
/* Don't bother if we already know the length is a constant. */
|
1497 |
|
|
if (*len && INTEGER_CST_P (*len))
|
1498 |
|
|
return;
|
1499 |
|
|
|
1500 |
|
|
ts = &expr->symtree->n.sym->ts;
|
1501 |
|
|
for (ref = expr->ref; ref; ref = ref->next)
|
1502 |
|
|
{
|
1503 |
|
|
switch (ref->type)
|
1504 |
|
|
{
|
1505 |
|
|
case REF_ARRAY:
|
1506 |
|
|
/* Array references don't change the string length. */
|
1507 |
|
|
break;
|
1508 |
|
|
|
1509 |
|
|
case REF_COMPONENT:
|
1510 |
|
|
/* Use the length of the component. */
|
1511 |
|
|
ts = &ref->u.c.component->ts;
|
1512 |
|
|
break;
|
1513 |
|
|
|
1514 |
|
|
case REF_SUBSTRING:
|
1515 |
|
|
if (ref->u.ss.start->expr_type != EXPR_CONSTANT
|
1516 |
|
|
|| ref->u.ss.end->expr_type != EXPR_CONSTANT)
|
1517 |
|
|
break;
|
1518 |
|
|
mpz_init_set_ui (char_len, 1);
|
1519 |
|
|
mpz_add (char_len, char_len, ref->u.ss.end->value.integer);
|
1520 |
|
|
mpz_sub (char_len, char_len, ref->u.ss.start->value.integer);
|
1521 |
|
|
*len = gfc_conv_mpz_to_tree (char_len, gfc_default_integer_kind);
|
1522 |
|
|
*len = convert (gfc_charlen_type_node, *len);
|
1523 |
|
|
mpz_clear (char_len);
|
1524 |
|
|
return;
|
1525 |
|
|
|
1526 |
|
|
default:
|
1527 |
|
|
/* TODO: Substrings are tricky because we can't evaluate the
|
1528 |
|
|
expression more than once. For now we just give up, and hope
|
1529 |
|
|
we can figure it out elsewhere. */
|
1530 |
|
|
return;
|
1531 |
|
|
}
|
1532 |
|
|
}
|
1533 |
|
|
|
1534 |
|
|
*len = ts->u.cl->backend_decl;
|
1535 |
|
|
}
|
1536 |
|
|
|
1537 |
|
|
|
1538 |
|
|
/* A catch-all to obtain the string length for anything that is not a
|
1539 |
|
|
constant, array or variable. */
|
1540 |
|
|
static void
|
1541 |
|
|
get_array_ctor_all_strlen (stmtblock_t *block, gfc_expr *e, tree *len)
|
1542 |
|
|
{
|
1543 |
|
|
gfc_se se;
|
1544 |
|
|
gfc_ss *ss;
|
1545 |
|
|
|
1546 |
|
|
/* Don't bother if we already know the length is a constant. */
|
1547 |
|
|
if (*len && INTEGER_CST_P (*len))
|
1548 |
|
|
return;
|
1549 |
|
|
|
1550 |
|
|
if (!e->ref && e->ts.u.cl && e->ts.u.cl->length
|
1551 |
|
|
&& e->ts.u.cl->length->expr_type == EXPR_CONSTANT)
|
1552 |
|
|
{
|
1553 |
|
|
/* This is easy. */
|
1554 |
|
|
gfc_conv_const_charlen (e->ts.u.cl);
|
1555 |
|
|
*len = e->ts.u.cl->backend_decl;
|
1556 |
|
|
}
|
1557 |
|
|
else
|
1558 |
|
|
{
|
1559 |
|
|
/* Otherwise, be brutal even if inefficient. */
|
1560 |
|
|
ss = gfc_walk_expr (e);
|
1561 |
|
|
gfc_init_se (&se, NULL);
|
1562 |
|
|
|
1563 |
|
|
/* No function call, in case of side effects. */
|
1564 |
|
|
se.no_function_call = 1;
|
1565 |
|
|
if (ss == gfc_ss_terminator)
|
1566 |
|
|
gfc_conv_expr (&se, e);
|
1567 |
|
|
else
|
1568 |
|
|
gfc_conv_expr_descriptor (&se, e, ss);
|
1569 |
|
|
|
1570 |
|
|
/* Fix the value. */
|
1571 |
|
|
*len = gfc_evaluate_now (se.string_length, &se.pre);
|
1572 |
|
|
|
1573 |
|
|
gfc_add_block_to_block (block, &se.pre);
|
1574 |
|
|
gfc_add_block_to_block (block, &se.post);
|
1575 |
|
|
|
1576 |
|
|
e->ts.u.cl->backend_decl = *len;
|
1577 |
|
|
}
|
1578 |
|
|
}
|
1579 |
|
|
|
1580 |
|
|
|
1581 |
|
|
/* Figure out the string length of a character array constructor.
|
1582 |
|
|
If len is NULL, don't calculate the length; this happens for recursive calls
|
1583 |
|
|
when a sub-array-constructor is an element but not at the first position,
|
1584 |
|
|
i.e. when we're not interested in the length.
|
1585 |
|
|
Returns TRUE if all elements are character constants. */
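/* For instance (illustrative only), for (/ 'ab', 'cd', v /) the length is
   taken from the first element ('ab', length 2), while the variable V makes
   the result FALSE because not every element is a character constant.  */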
|
1586 |
|
|
|
1587 |
|
|
bool
|
1588 |
|
|
get_array_ctor_strlen (stmtblock_t *block, gfc_constructor * c, tree * len)
|
1589 |
|
|
{
|
1590 |
|
|
bool is_const;
|
1591 |
|
|
|
1592 |
|
|
is_const = TRUE;
|
1593 |
|
|
|
1594 |
|
|
if (c == NULL)
|
1595 |
|
|
{
|
1596 |
|
|
if (len)
|
1597 |
|
|
*len = build_int_cstu (gfc_charlen_type_node, 0);
|
1598 |
|
|
return is_const;
|
1599 |
|
|
}
|
1600 |
|
|
|
1601 |
|
|
/* Loop over all constructor elements to find out is_const, but in len we
|
1602 |
|
|
want to store the length of the first, not the last, element. We can
|
1603 |
|
|
of course exit the loop as soon as is_const is found to be false. */
|
1604 |
|
|
for (; c && is_const; c = c->next)
|
1605 |
|
|
{
|
1606 |
|
|
switch (c->expr->expr_type)
|
1607 |
|
|
{
|
1608 |
|
|
case EXPR_CONSTANT:
|
1609 |
|
|
if (len && !(*len && INTEGER_CST_P (*len)))
|
1610 |
|
|
*len = build_int_cstu (gfc_charlen_type_node,
|
1611 |
|
|
c->expr->value.character.length);
|
1612 |
|
|
break;
|
1613 |
|
|
|
1614 |
|
|
case EXPR_ARRAY:
|
1615 |
|
|
if (!get_array_ctor_strlen (block, c->expr->value.constructor, len))
|
1616 |
|
|
is_const = false;
|
1617 |
|
|
break;
|
1618 |
|
|
|
1619 |
|
|
case EXPR_VARIABLE:
|
1620 |
|
|
is_const = false;
|
1621 |
|
|
if (len)
|
1622 |
|
|
get_array_ctor_var_strlen (c->expr, len);
|
1623 |
|
|
break;
|
1624 |
|
|
|
1625 |
|
|
default:
|
1626 |
|
|
is_const = false;
|
1627 |
|
|
if (len)
|
1628 |
|
|
get_array_ctor_all_strlen (block, c->expr, len);
|
1629 |
|
|
break;
|
1630 |
|
|
}
|
1631 |
|
|
|
1632 |
|
|
/* After the first iteration, we don't want the length modified. */
|
1633 |
|
|
len = NULL;
|
1634 |
|
|
}
|
1635 |
|
|
|
1636 |
|
|
return is_const;
|
1637 |
|
|
}
|
1638 |
|
|
|
1639 |
|
|
/* Check whether the array constructor C consists entirely of constant
|
1640 |
|
|
elements, and if so return the number of those elements; otherwise
|
1641 |
|
|
return zero. Note, an empty or NULL array constructor returns zero. */
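/* For example (illustrative cases), (/ 1, 2, 3 /) yields 3, whereas
   (/ 1, n /) or (/ (i, i = 1, 3) /) yields 0 because of the variable and
   the iterator respectively.  */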
|
1642 |
|
|
|
1643 |
|
|
unsigned HOST_WIDE_INT
|
1644 |
|
|
gfc_constant_array_constructor_p (gfc_constructor * c)
|
1645 |
|
|
{
|
1646 |
|
|
unsigned HOST_WIDE_INT nelem = 0;
|
1647 |
|
|
|
1648 |
|
|
while (c)
|
1649 |
|
|
{
|
1650 |
|
|
if (c->iterator
|
1651 |
|
|
|| c->expr->rank > 0
|
1652 |
|
|
|| c->expr->expr_type != EXPR_CONSTANT)
|
1653 |
|
|
return 0;
|
1654 |
|
|
c = c->next;
|
1655 |
|
|
nelem++;
|
1656 |
|
|
}
|
1657 |
|
|
return nelem;
|
1658 |
|
|
}
|
1659 |
|
|
|
1660 |
|
|
|
1661 |
|
|
/* Given EXPR, the constant array constructor specified by an EXPR_ARRAY,
|
1662 |
|
|
and the tree type of its elements, TYPE, return a static constant
|
1663 |
|
|
variable that is compile-time initialized. */
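/* Roughly, for a constructor such as (/ 1, 2, 3 /) of default integers this
   produces the equivalent of

      static const int A[3] = {1, 2, 3};

   that is, a TREE_STATIC, TREE_READONLY variable whose DECL_INITIAL holds
   the converted elements (an analogy only; the real type is the
   descriptorless array type built below).  */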
|
1664 |
|
|
|
1665 |
|
|
tree
|
1666 |
|
|
gfc_build_constant_array_constructor (gfc_expr * expr, tree type)
|
1667 |
|
|
{
|
1668 |
|
|
tree tmptype, list, init, tmp;
|
1669 |
|
|
HOST_WIDE_INT nelem;
|
1670 |
|
|
gfc_constructor *c;
|
1671 |
|
|
gfc_array_spec as;
|
1672 |
|
|
gfc_se se;
|
1673 |
|
|
int i;
|
1674 |
|
|
|
1675 |
|
|
/* First traverse the constructor list, converting the constants
|
1676 |
|
|
to tree to build an initializer. */
|
1677 |
|
|
nelem = 0;
|
1678 |
|
|
list = NULL_TREE;
|
1679 |
|
|
c = expr->value.constructor;
|
1680 |
|
|
while (c)
|
1681 |
|
|
{
|
1682 |
|
|
gfc_init_se (&se, NULL);
|
1683 |
|
|
gfc_conv_constant (&se, c->expr);
|
1684 |
|
|
if (c->expr->ts.type != BT_CHARACTER)
|
1685 |
|
|
se.expr = fold_convert (type, se.expr);
|
1686 |
|
|
else if (POINTER_TYPE_P (type))
|
1687 |
|
|
se.expr = gfc_build_addr_expr (gfc_get_pchar_type (c->expr->ts.kind),
|
1688 |
|
|
se.expr);
|
1689 |
|
|
list = tree_cons (build_int_cst (gfc_array_index_type, nelem),
|
1690 |
|
|
se.expr, list);
|
1691 |
|
|
c = c->next;
|
1692 |
|
|
nelem++;
|
1693 |
|
|
}
|
1694 |
|
|
|
1695 |
|
|
/* Next determine the tree type for the array. We use the gfortran
|
1696 |
|
|
front-end's gfc_get_nodesc_array_type in order to create a suitable
|
1697 |
|
|
GFC_ARRAY_TYPE_P that may be used by the scalarizer. */
|
1698 |
|
|
|
1699 |
|
|
memset (&as, 0, sizeof (gfc_array_spec));
|
1700 |
|
|
|
1701 |
|
|
as.rank = expr->rank;
|
1702 |
|
|
as.type = AS_EXPLICIT;
|
1703 |
|
|
if (!expr->shape)
|
1704 |
|
|
{
|
1705 |
|
|
as.lower[0] = gfc_int_expr (0);
|
1706 |
|
|
as.upper[0] = gfc_int_expr (nelem - 1);
|
1707 |
|
|
}
|
1708 |
|
|
else
|
1709 |
|
|
for (i = 0; i < expr->rank; i++)
|
1710 |
|
|
{
|
1711 |
|
|
int tmp = (int) mpz_get_si (expr->shape[i]);
|
1712 |
|
|
as.lower[i] = gfc_int_expr (0);
|
1713 |
|
|
as.upper[i] = gfc_int_expr (tmp - 1);
|
1714 |
|
|
}
|
1715 |
|
|
|
1716 |
|
|
tmptype = gfc_get_nodesc_array_type (type, &as, PACKED_STATIC, true);
|
1717 |
|
|
|
1718 |
|
|
init = build_constructor_from_list (tmptype, nreverse (list));
|
1719 |
|
|
|
1720 |
|
|
TREE_CONSTANT (init) = 1;
|
1721 |
|
|
TREE_STATIC (init) = 1;
|
1722 |
|
|
|
1723 |
|
|
tmp = gfc_create_var (tmptype, "A");
|
1724 |
|
|
TREE_STATIC (tmp) = 1;
|
1725 |
|
|
TREE_CONSTANT (tmp) = 1;
|
1726 |
|
|
TREE_READONLY (tmp) = 1;
|
1727 |
|
|
DECL_INITIAL (tmp) = init;
|
1728 |
|
|
|
1729 |
|
|
return tmp;
|
1730 |
|
|
}
|
1731 |
|
|
|
1732 |
|
|
|
1733 |
|
|
/* Translate a constant EXPR_ARRAY array constructor for the scalarizer.
|
1734 |
|
|
This mostly initializes the scalarizer state info structure with the
|
1735 |
|
|
appropriate values to directly use the array created by the function
|
1736 |
|
|
gfc_build_constant_array_constructor. */
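/* In effect the scalarizer is pointed straight at the static initializer:
   the offset, deltas and starts are all zero and the strides are one, so
   loop index i simply addresses element i of the constant array (a sketch
   of what the assignments below achieve).  */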
|
1737 |
|
|
|
1738 |
|
|
static void
|
1739 |
|
|
gfc_trans_constant_array_constructor (gfc_loopinfo * loop,
|
1740 |
|
|
gfc_ss * ss, tree type)
|
1741 |
|
|
{
|
1742 |
|
|
gfc_ss_info *info;
|
1743 |
|
|
tree tmp;
|
1744 |
|
|
int i;
|
1745 |
|
|
|
1746 |
|
|
tmp = gfc_build_constant_array_constructor (ss->expr, type);
|
1747 |
|
|
|
1748 |
|
|
info = &ss->data.info;
|
1749 |
|
|
|
1750 |
|
|
info->descriptor = tmp;
|
1751 |
|
|
info->data = gfc_build_addr_expr (NULL_TREE, tmp);
|
1752 |
|
|
info->offset = gfc_index_zero_node;
|
1753 |
|
|
|
1754 |
|
|
for (i = 0; i < info->dimen; i++)
|
1755 |
|
|
{
|
1756 |
|
|
info->delta[i] = gfc_index_zero_node;
|
1757 |
|
|
info->start[i] = gfc_index_zero_node;
|
1758 |
|
|
info->end[i] = gfc_index_zero_node;
|
1759 |
|
|
info->stride[i] = gfc_index_one_node;
|
1760 |
|
|
info->dim[i] = i;
|
1761 |
|
|
}
|
1762 |
|
|
|
1763 |
|
|
if (info->dimen > loop->temp_dim)
|
1764 |
|
|
loop->temp_dim = info->dimen;
|
1765 |
|
|
}
|
1766 |
|
|
|
1767 |
|
|
/* Helper routine of gfc_trans_array_constructor to determine if the
|
1768 |
|
|
bounds of the loop specified by LOOP are constant and simple enough
|
1769 |
|
|
to use with gfc_trans_constant_array_constructor. Returns the
|
1770 |
|
|
iteration count of the loop if suitable, and NULL_TREE otherwise. */
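/* For example (illustrative numbers), a loop running 0..4 in one dimension
   and 0..2 in another gives (4 + 1) * (2 + 1) = 15 iterations; any
   non-constant bound, or a nonzero lower bound in a multi-dimensional loop,
   makes the routine return NULL_TREE.  */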
|
1771 |
|
|
|
1772 |
|
|
static tree
|
1773 |
|
|
constant_array_constructor_loop_size (gfc_loopinfo * loop)
|
1774 |
|
|
{
|
1775 |
|
|
tree size = gfc_index_one_node;
|
1776 |
|
|
tree tmp;
|
1777 |
|
|
int i;
|
1778 |
|
|
|
1779 |
|
|
for (i = 0; i < loop->dimen; i++)
|
1780 |
|
|
{
|
1781 |
|
|
/* If the bounds aren't constant, return NULL_TREE. */
|
1782 |
|
|
if (!INTEGER_CST_P (loop->from[i]) || !INTEGER_CST_P (loop->to[i]))
|
1783 |
|
|
return NULL_TREE;
|
1784 |
|
|
if (!integer_zerop (loop->from[i]))
|
1785 |
|
|
{
|
1786 |
|
|
/* Only allow nonzero "from" in one-dimensional arrays. */
|
1787 |
|
|
if (loop->dimen != 1)
|
1788 |
|
|
return NULL_TREE;
|
1789 |
|
|
tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type,
|
1790 |
|
|
loop->to[i], loop->from[i]);
|
1791 |
|
|
}
|
1792 |
|
|
else
|
1793 |
|
|
tmp = loop->to[i];
|
1794 |
|
|
tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
|
1795 |
|
|
tmp, gfc_index_one_node);
|
1796 |
|
|
size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
|
1797 |
|
|
}
|
1798 |
|
|
|
1799 |
|
|
return size;
|
1800 |
|
|
}
|
1801 |
|
|
|
1802 |
|
|
|
1803 |
|
|
/* Array constructors are handled by constructing a temporary, then using that
|
1804 |
|
|
within the scalarization loop. This is not optimal, but seems by far the
|
1805 |
|
|
simplest method. */
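/* So a constructor like (/ a, 2 * b /) is first evaluated element by
   element into a temporary, and the surrounding scalarization loop then
   indexes that temporary like any other array (a sketch of the general,
   non-constant case handled below).  */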
|
1806 |
|
|
|
1807 |
|
|
static void
|
1808 |
|
|
gfc_trans_array_constructor (gfc_loopinfo * loop, gfc_ss * ss, locus * where)
|
1809 |
|
|
{
|
1810 |
|
|
gfc_constructor *c;
|
1811 |
|
|
tree offset;
|
1812 |
|
|
tree offsetvar;
|
1813 |
|
|
tree desc;
|
1814 |
|
|
tree type;
|
1815 |
|
|
bool dynamic;
|
1816 |
|
|
bool old_first_len, old_typespec_chararray_ctor;
|
1817 |
|
|
tree old_first_len_val;
|
1818 |
|
|
|
1819 |
|
|
/* Save the old values for nested checking. */
|
1820 |
|
|
old_first_len = first_len;
|
1821 |
|
|
old_first_len_val = first_len_val;
|
1822 |
|
|
old_typespec_chararray_ctor = typespec_chararray_ctor;
|
1823 |
|
|
|
1824 |
|
|
/* Do bounds-checking here and in gfc_trans_array_ctor_element only if no
|
1825 |
|
|
typespec was given for the array constructor. */
|
1826 |
|
|
typespec_chararray_ctor = (ss->expr->ts.u.cl
|
1827 |
|
|
&& ss->expr->ts.u.cl->length_from_typespec);
|
1828 |
|
|
|
1829 |
|
|
if ((gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
|
1830 |
|
|
&& ss->expr->ts.type == BT_CHARACTER && !typespec_chararray_ctor)
|
1831 |
|
|
{
|
1832 |
|
|
first_len_val = gfc_create_var (gfc_charlen_type_node, "len");
|
1833 |
|
|
first_len = true;
|
1834 |
|
|
}
|
1835 |
|
|
|
1836 |
|
|
ss->data.info.dimen = loop->dimen;
|
1837 |
|
|
|
1838 |
|
|
c = ss->expr->value.constructor;
|
1839 |
|
|
if (ss->expr->ts.type == BT_CHARACTER)
|
1840 |
|
|
{
|
1841 |
|
|
bool const_string;
|
1842 |
|
|
|
1843 |
|
|
/* get_array_ctor_strlen walks the elements of the constructor; if a
|
1844 |
|
|
typespec was given, we already know the string length and want the one
|
1845 |
|
|
specified there. */
|
1846 |
|
|
if (typespec_chararray_ctor && ss->expr->ts.u.cl->length
|
1847 |
|
|
&& ss->expr->ts.u.cl->length->expr_type != EXPR_CONSTANT)
|
1848 |
|
|
{
|
1849 |
|
|
gfc_se length_se;
|
1850 |
|
|
|
1851 |
|
|
const_string = false;
|
1852 |
|
|
gfc_init_se (&length_se, NULL);
|
1853 |
|
|
gfc_conv_expr_type (&length_se, ss->expr->ts.u.cl->length,
|
1854 |
|
|
gfc_charlen_type_node);
|
1855 |
|
|
ss->string_length = length_se.expr;
|
1856 |
|
|
gfc_add_block_to_block (&loop->pre, &length_se.pre);
|
1857 |
|
|
gfc_add_block_to_block (&loop->post, &length_se.post);
|
1858 |
|
|
}
|
1859 |
|
|
else
|
1860 |
|
|
const_string = get_array_ctor_strlen (&loop->pre, c,
|
1861 |
|
|
&ss->string_length);
|
1862 |
|
|
|
1863 |
|
|
/* Complex character array constructors should have been taken care of
|
1864 |
|
|
and not end up here. */
|
1865 |
|
|
gcc_assert (ss->string_length);
|
1866 |
|
|
|
1867 |
|
|
ss->expr->ts.u.cl->backend_decl = ss->string_length;
|
1868 |
|
|
|
1869 |
|
|
type = gfc_get_character_type_len (ss->expr->ts.kind, ss->string_length);
|
1870 |
|
|
if (const_string)
|
1871 |
|
|
type = build_pointer_type (type);
|
1872 |
|
|
}
|
1873 |
|
|
else
|
1874 |
|
|
type = gfc_typenode_for_spec (&ss->expr->ts);
|
1875 |
|
|
|
1876 |
|
|
/* See if the constructor determines the loop bounds. */
|
1877 |
|
|
dynamic = false;
|
1878 |
|
|
|
1879 |
|
|
if (ss->expr->shape && loop->dimen > 1 && loop->to[0] == NULL_TREE)
|
1880 |
|
|
{
|
1881 |
|
|
/* We have a multidimensional parameter. */
|
1882 |
|
|
int n;
|
1883 |
|
|
for (n = 0; n < ss->expr->rank; n++)
|
1884 |
|
|
{
|
1885 |
|
|
loop->from[n] = gfc_index_zero_node;
|
1886 |
|
|
loop->to[n] = gfc_conv_mpz_to_tree (ss->expr->shape [n],
|
1887 |
|
|
gfc_index_integer_kind);
|
1888 |
|
|
loop->to[n] = fold_build2 (MINUS_EXPR, gfc_array_index_type,
|
1889 |
|
|
loop->to[n], gfc_index_one_node);
|
1890 |
|
|
}
|
1891 |
|
|
}
|
1892 |
|
|
|
1893 |
|
|
if (loop->to[0] == NULL_TREE)
|
1894 |
|
|
{
|
1895 |
|
|
mpz_t size;
|
1896 |
|
|
|
1897 |
|
|
/* We should have a 1-dimensional, zero-based loop. */
|
1898 |
|
|
gcc_assert (loop->dimen == 1);
|
1899 |
|
|
gcc_assert (integer_zerop (loop->from[0]));
|
1900 |
|
|
|
1901 |
|
|
/* Split the constructor size into a static part and a dynamic part.
|
1902 |
|
|
Allocate the static size up-front and record whether the dynamic
|
1903 |
|
|
size might be nonzero. */
|
1904 |
|
|
mpz_init (size);
|
1905 |
|
|
dynamic = gfc_get_array_constructor_size (&size, c);
|
1906 |
|
|
mpz_sub_ui (size, size, 1);
|
1907 |
|
|
loop->to[0] = gfc_conv_mpz_to_tree (size, gfc_index_integer_kind);
|
1908 |
|
|
mpz_clear (size);
|
1909 |
|
|
}
|
1910 |
|
|
|
1911 |
|
|
/* Special case constant array constructors. */
|
1912 |
|
|
if (!dynamic)
|
1913 |
|
|
{
|
1914 |
|
|
unsigned HOST_WIDE_INT nelem = gfc_constant_array_constructor_p (c);
|
1915 |
|
|
if (nelem > 0)
|
1916 |
|
|
{
|
1917 |
|
|
tree size = constant_array_constructor_loop_size (loop);
|
1918 |
|
|
if (size && compare_tree_int (size, nelem) == 0)
|
1919 |
|
|
{
|
1920 |
|
|
gfc_trans_constant_array_constructor (loop, ss, type);
|
1921 |
|
|
goto finish;
|
1922 |
|
|
}
|
1923 |
|
|
}
|
1924 |
|
|
}
|
1925 |
|
|
|
1926 |
|
|
gfc_trans_create_temp_array (&loop->pre, &loop->post, loop, &ss->data.info,
|
1927 |
|
|
type, NULL_TREE, dynamic, true, false, where);
|
1928 |
|
|
|
1929 |
|
|
desc = ss->data.info.descriptor;
|
1930 |
|
|
offset = gfc_index_zero_node;
|
1931 |
|
|
offsetvar = gfc_create_var_np (gfc_array_index_type, "offset");
|
1932 |
|
|
TREE_NO_WARNING (offsetvar) = 1;
|
1933 |
|
|
TREE_USED (offsetvar) = 0;
|
1934 |
|
|
gfc_trans_array_constructor_value (&loop->pre, type, desc, c,
|
1935 |
|
|
&offset, &offsetvar, dynamic);
|
1936 |
|
|
|
1937 |
|
|
/* If the array grows dynamically, the upper bound of the loop variable
|
1938 |
|
|
is determined by the array's final upper bound. */
|
1939 |
|
|
if (dynamic)
|
1940 |
|
|
loop->to[0] = gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[0]);
|
1941 |
|
|
|
1942 |
|
|
if (TREE_USED (offsetvar))
|
1943 |
|
|
pushdecl (offsetvar);
|
1944 |
|
|
else
|
1945 |
|
|
gcc_assert (INTEGER_CST_P (offset));
|
1946 |
|
|
#if 0
|
1947 |
|
|
/* Disable bound checking for now because it's probably broken. */
|
1948 |
|
|
if (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
|
1949 |
|
|
{
|
1950 |
|
|
gcc_unreachable ();
|
1951 |
|
|
}
|
1952 |
|
|
#endif
|
1953 |
|
|
|
1954 |
|
|
finish:
|
1955 |
|
|
/* Restore old values of globals. */
|
1956 |
|
|
first_len = old_first_len;
|
1957 |
|
|
first_len_val = old_first_len_val;
|
1958 |
|
|
typespec_chararray_ctor = old_typespec_chararray_ctor;
|
1959 |
|
|
}
|
1960 |
|
|
|
1961 |
|
|
|
1962 |
|
|
/* INFO describes a GFC_SS_SECTION in loop LOOP, and this function is
|
1963 |
|
|
called after evaluating all of INFO's vector dimensions. Go through
|
1964 |
|
|
each such vector dimension and see if we can now fill in any missing
|
1965 |
|
|
loop bounds. */
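/* For a vector subscript such as a(v), with v a rank-1 array, the loop for
   that dimension runs from 0 to ubound(v) - lbound(v), taken from V's
   descriptor (an illustration of the calculation below).  */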
|
1966 |
|
|
|
1967 |
|
|
static void
|
1968 |
|
|
gfc_set_vector_loop_bounds (gfc_loopinfo * loop, gfc_ss_info * info)
|
1969 |
|
|
{
|
1970 |
|
|
gfc_se se;
|
1971 |
|
|
tree tmp;
|
1972 |
|
|
tree desc;
|
1973 |
|
|
tree zero;
|
1974 |
|
|
int n;
|
1975 |
|
|
int dim;
|
1976 |
|
|
|
1977 |
|
|
for (n = 0; n < loop->dimen; n++)
|
1978 |
|
|
{
|
1979 |
|
|
dim = info->dim[n];
|
1980 |
|
|
if (info->ref->u.ar.dimen_type[dim] == DIMEN_VECTOR
|
1981 |
|
|
&& loop->to[n] == NULL)
|
1982 |
|
|
{
|
1983 |
|
|
/* Loop variable N indexes vector dimension DIM, and we don't
|
1984 |
|
|
yet know the upper bound of loop variable N. Set it to the
|
1985 |
|
|
difference between the vector's upper and lower bounds. */
|
1986 |
|
|
gcc_assert (loop->from[n] == gfc_index_zero_node);
|
1987 |
|
|
gcc_assert (info->subscript[dim]
|
1988 |
|
|
&& info->subscript[dim]->type == GFC_SS_VECTOR);
|
1989 |
|
|
|
1990 |
|
|
gfc_init_se (&se, NULL);
|
1991 |
|
|
desc = info->subscript[dim]->data.info.descriptor;
|
1992 |
|
|
zero = gfc_rank_cst[0];
|
1993 |
|
|
tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type,
|
1994 |
|
|
gfc_conv_descriptor_ubound_get (desc, zero),
|
1995 |
|
|
gfc_conv_descriptor_lbound_get (desc, zero));
|
1996 |
|
|
tmp = gfc_evaluate_now (tmp, &loop->pre);
|
1997 |
|
|
loop->to[n] = tmp;
|
1998 |
|
|
}
|
1999 |
|
|
}
|
2000 |
|
|
}
|
2001 |
|
|
|
2002 |
|
|
|
2003 |
|
|
/* Add the pre and post chains for all the scalar expressions in a SS chain
|
2004 |
|
|
to loop. This is called after the loop parameters have been calculated,
|
2005 |
|
|
but before the actual scalarizing loops. */
|
2006 |
|
|
|
2007 |
|
|
static void
|
2008 |
|
|
gfc_add_loop_ss_code (gfc_loopinfo * loop, gfc_ss * ss, bool subscript,
|
2009 |
|
|
locus * where)
|
2010 |
|
|
{
|
2011 |
|
|
gfc_se se;
|
2012 |
|
|
int n;
|
2013 |
|
|
|
2014 |
|
|
/* TODO: This can generate bad code if there are ordering dependencies,
|
2015 |
|
|
e.g., a callee allocated function and an unknown size constructor. */
|
2016 |
|
|
gcc_assert (ss != NULL);
|
2017 |
|
|
|
2018 |
|
|
for (; ss != gfc_ss_terminator; ss = ss->loop_chain)
|
2019 |
|
|
{
|
2020 |
|
|
gcc_assert (ss);
|
2021 |
|
|
|
2022 |
|
|
switch (ss->type)
|
2023 |
|
|
{
|
2024 |
|
|
case GFC_SS_SCALAR:
|
2025 |
|
|
/* Scalar expression. Evaluate this now. This includes elemental
|
2026 |
|
|
dimension indices, but not array section bounds. */
|
2027 |
|
|
gfc_init_se (&se, NULL);
|
2028 |
|
|
gfc_conv_expr (&se, ss->expr);
|
2029 |
|
|
gfc_add_block_to_block (&loop->pre, &se.pre);
|
2030 |
|
|
|
2031 |
|
|
if (ss->expr->ts.type != BT_CHARACTER)
|
2032 |
|
|
{
|
2033 |
|
|
/* Move the evaluation of scalar expressions outside the
|
2034 |
|
|
scalarization loop, except for WHERE assignments. */
|
2035 |
|
|
if (subscript)
|
2036 |
|
|
se.expr = convert(gfc_array_index_type, se.expr);
|
2037 |
|
|
if (!ss->where)
|
2038 |
|
|
se.expr = gfc_evaluate_now (se.expr, &loop->pre);
|
2039 |
|
|
gfc_add_block_to_block (&loop->pre, &se.post);
|
2040 |
|
|
}
|
2041 |
|
|
else
|
2042 |
|
|
gfc_add_block_to_block (&loop->post, &se.post);
|
2043 |
|
|
|
2044 |
|
|
ss->data.scalar.expr = se.expr;
|
2045 |
|
|
ss->string_length = se.string_length;
|
2046 |
|
|
break;
|
2047 |
|
|
|
2048 |
|
|
case GFC_SS_REFERENCE:
|
2049 |
|
|
/* Scalar argument to elemental procedure. Evaluate this
|
2050 |
|
|
now. */
|
2051 |
|
|
gfc_init_se (&se, NULL);
|
2052 |
|
|
gfc_conv_expr (&se, ss->expr);
|
2053 |
|
|
gfc_add_block_to_block (&loop->pre, &se.pre);
|
2054 |
|
|
gfc_add_block_to_block (&loop->post, &se.post);
|
2055 |
|
|
|
2056 |
|
|
ss->data.scalar.expr = gfc_evaluate_now (se.expr, &loop->pre);
|
2057 |
|
|
ss->string_length = se.string_length;
|
2058 |
|
|
break;
|
2059 |
|
|
|
2060 |
|
|
case GFC_SS_SECTION:
|
2061 |
|
|
/* Add the expressions for scalar and vector subscripts. */
|
2062 |
|
|
for (n = 0; n < GFC_MAX_DIMENSIONS; n++)
|
2063 |
|
|
if (ss->data.info.subscript[n])
|
2064 |
|
|
gfc_add_loop_ss_code (loop, ss->data.info.subscript[n], true,
|
2065 |
|
|
where);
|
2066 |
|
|
|
2067 |
|
|
gfc_set_vector_loop_bounds (loop, &ss->data.info);
|
2068 |
|
|
break;
|
2069 |
|
|
|
2070 |
|
|
case GFC_SS_VECTOR:
|
2071 |
|
|
/* Get the vector's descriptor and store it in SS. */
|
2072 |
|
|
gfc_init_se (&se, NULL);
|
2073 |
|
|
gfc_conv_expr_descriptor (&se, ss->expr, gfc_walk_expr (ss->expr));
|
2074 |
|
|
gfc_add_block_to_block (&loop->pre, &se.pre);
|
2075 |
|
|
gfc_add_block_to_block (&loop->post, &se.post);
|
2076 |
|
|
ss->data.info.descriptor = se.expr;
|
2077 |
|
|
break;
|
2078 |
|
|
|
2079 |
|
|
case GFC_SS_INTRINSIC:
|
2080 |
|
|
gfc_add_intrinsic_ss_code (loop, ss);
|
2081 |
|
|
break;
|
2082 |
|
|
|
2083 |
|
|
case GFC_SS_FUNCTION:
|
2084 |
|
|
/* Array function return value. We call the function and save its
|
2085 |
|
|
result in a temporary for use inside the loop. */
|
2086 |
|
|
gfc_init_se (&se, NULL);
|
2087 |
|
|
se.loop = loop;
|
2088 |
|
|
se.ss = ss;
|
2089 |
|
|
gfc_conv_expr (&se, ss->expr);
|
2090 |
|
|
gfc_add_block_to_block (&loop->pre, &se.pre);
|
2091 |
|
|
gfc_add_block_to_block (&loop->post, &se.post);
|
2092 |
|
|
ss->string_length = se.string_length;
|
2093 |
|
|
break;
|
2094 |
|
|
|
2095 |
|
|
case GFC_SS_CONSTRUCTOR:
|
2096 |
|
|
if (ss->expr->ts.type == BT_CHARACTER
|
2097 |
|
|
&& ss->string_length == NULL
|
2098 |
|
|
&& ss->expr->ts.u.cl
|
2099 |
|
|
&& ss->expr->ts.u.cl->length)
|
2100 |
|
|
{
|
2101 |
|
|
gfc_init_se (&se, NULL);
|
2102 |
|
|
gfc_conv_expr_type (&se, ss->expr->ts.u.cl->length,
|
2103 |
|
|
gfc_charlen_type_node);
|
2104 |
|
|
ss->string_length = se.expr;
|
2105 |
|
|
gfc_add_block_to_block (&loop->pre, &se.pre);
|
2106 |
|
|
gfc_add_block_to_block (&loop->post, &se.post);
|
2107 |
|
|
}
|
2108 |
|
|
gfc_trans_array_constructor (loop, ss, where);
|
2109 |
|
|
break;
|
2110 |
|
|
|
2111 |
|
|
case GFC_SS_TEMP:
|
2112 |
|
|
case GFC_SS_COMPONENT:
|
2113 |
|
|
/* Do nothing. These are handled elsewhere. */
|
2114 |
|
|
break;
|
2115 |
|
|
|
2116 |
|
|
default:
|
2117 |
|
|
gcc_unreachable ();
|
2118 |
|
|
}
|
2119 |
|
|
}
|
2120 |
|
|
}
|
2121 |
|
|
|
2122 |
|
|
|
2123 |
|
|
/* Translate expressions for the descriptor and data pointer of a SS. */
|
2124 |
|
|
/*GCC ARRAYS*/
|
2125 |
|
|
|
2126 |
|
|
static void
|
2127 |
|
|
gfc_conv_ss_descriptor (stmtblock_t * block, gfc_ss * ss, int base)
|
2128 |
|
|
{
|
2129 |
|
|
gfc_se se;
|
2130 |
|
|
tree tmp;
|
2131 |
|
|
|
2132 |
|
|
/* Get the descriptor for the array to be scalarized. */
|
2133 |
|
|
gcc_assert (ss->expr->expr_type == EXPR_VARIABLE);
|
2134 |
|
|
gfc_init_se (&se, NULL);
|
2135 |
|
|
se.descriptor_only = 1;
|
2136 |
|
|
gfc_conv_expr_lhs (&se, ss->expr);
|
2137 |
|
|
gfc_add_block_to_block (block, &se.pre);
|
2138 |
|
|
ss->data.info.descriptor = se.expr;
|
2139 |
|
|
ss->string_length = se.string_length;
|
2140 |
|
|
|
2141 |
|
|
if (base)
|
2142 |
|
|
{
|
2143 |
|
|
/* Also the data pointer. */
|
2144 |
|
|
tmp = gfc_conv_array_data (se.expr);
|
2145 |
|
|
/* If this is a variable or address of a variable we use it directly.
|
2146 |
|
|
Otherwise we must evaluate it now to avoid breaking dependency
|
2147 |
|
|
analysis by pulling the expressions for elemental array indices
|
2148 |
|
|
inside the loop. */
|
2149 |
|
|
if (!(DECL_P (tmp)
|
2150 |
|
|
|| (TREE_CODE (tmp) == ADDR_EXPR
|
2151 |
|
|
&& DECL_P (TREE_OPERAND (tmp, 0)))))
|
2152 |
|
|
tmp = gfc_evaluate_now (tmp, block);
|
2153 |
|
|
ss->data.info.data = tmp;
|
2154 |
|
|
|
2155 |
|
|
tmp = gfc_conv_array_offset (se.expr);
|
2156 |
|
|
ss->data.info.offset = gfc_evaluate_now (tmp, block);
|
2157 |
|
|
}
|
2158 |
|
|
}
|
2159 |
|
|
|
2160 |
|
|
|
2161 |
|
|
/* Initialize a gfc_loopinfo structure. */
|
2162 |
|
|
|
2163 |
|
|
void
|
2164 |
|
|
gfc_init_loopinfo (gfc_loopinfo * loop)
|
2165 |
|
|
{
|
2166 |
|
|
int n;
|
2167 |
|
|
|
2168 |
|
|
memset (loop, 0, sizeof (gfc_loopinfo));
|
2169 |
|
|
gfc_init_block (&loop->pre);
|
2170 |
|
|
gfc_init_block (&loop->post);
|
2171 |
|
|
|
2172 |
|
|
/* Initially scalarize in order. */
|
2173 |
|
|
for (n = 0; n < GFC_MAX_DIMENSIONS; n++)
|
2174 |
|
|
loop->order[n] = n;
|
2175 |
|
|
|
2176 |
|
|
loop->ss = gfc_ss_terminator;
|
2177 |
|
|
}
|
2178 |
|
|
|
2179 |
|
|
|
2180 |
|
|
/* Copies the loop variable info to a gfc_se structure. Does not copy the SS
|
2181 |
|
|
chain. */
|
2182 |
|
|
|
2183 |
|
|
void
|
2184 |
|
|
gfc_copy_loopinfo_to_se (gfc_se * se, gfc_loopinfo * loop)
|
2185 |
|
|
{
|
2186 |
|
|
se->loop = loop;
|
2187 |
|
|
}
|
2188 |
|
|
|
2189 |
|
|
|
2190 |
|
|
/* Return an expression for the data pointer of an array. */
|
2191 |
|
|
|
2192 |
|
|
tree
|
2193 |
|
|
gfc_conv_array_data (tree descriptor)
|
2194 |
|
|
{
|
2195 |
|
|
tree type;
|
2196 |
|
|
|
2197 |
|
|
type = TREE_TYPE (descriptor);
|
2198 |
|
|
if (GFC_ARRAY_TYPE_P (type))
|
2199 |
|
|
{
|
2200 |
|
|
if (TREE_CODE (type) == POINTER_TYPE)
|
2201 |
|
|
return descriptor;
|
2202 |
|
|
else
|
2203 |
|
|
{
|
2204 |
|
|
/* Descriptorless arrays. */
|
2205 |
|
|
return gfc_build_addr_expr (NULL_TREE, descriptor);
|
2206 |
|
|
}
|
2207 |
|
|
}
|
2208 |
|
|
else
|
2209 |
|
|
return gfc_conv_descriptor_data_get (descriptor);
|
2210 |
|
|
}
|
2211 |
|
|
|
2212 |
|
|
|
2213 |
|
|
/* Return an expression for the base offset of an array. */
|
2214 |
|
|
|
2215 |
|
|
tree
|
2216 |
|
|
gfc_conv_array_offset (tree descriptor)
|
2217 |
|
|
{
|
2218 |
|
|
tree type;
|
2219 |
|
|
|
2220 |
|
|
type = TREE_TYPE (descriptor);
|
2221 |
|
|
if (GFC_ARRAY_TYPE_P (type))
|
2222 |
|
|
return GFC_TYPE_ARRAY_OFFSET (type);
|
2223 |
|
|
else
|
2224 |
|
|
return gfc_conv_descriptor_offset_get (descriptor);
|
2225 |
|
|
}
|
2226 |
|
|
|
2227 |
|
|
|
2228 |
|
|
/* Get an expression for the array stride. */
|
2229 |
|
|
|
2230 |
|
|
tree
|
2231 |
|
|
gfc_conv_array_stride (tree descriptor, int dim)
|
2232 |
|
|
{
|
2233 |
|
|
tree tmp;
|
2234 |
|
|
tree type;
|
2235 |
|
|
|
2236 |
|
|
type = TREE_TYPE (descriptor);
|
2237 |
|
|
|
2238 |
|
|
/* For descriptorless arrays use the array size. */
|
2239 |
|
|
tmp = GFC_TYPE_ARRAY_STRIDE (type, dim);
|
2240 |
|
|
if (tmp != NULL_TREE)
|
2241 |
|
|
return tmp;
|
2242 |
|
|
|
2243 |
|
|
tmp = gfc_conv_descriptor_stride_get (descriptor, gfc_rank_cst[dim]);
|
2244 |
|
|
return tmp;
|
2245 |
|
|
}
|
2246 |
|
|
|
2247 |
|
|
|
2248 |
|
|
/* Like gfc_conv_array_stride, but for the lower bound. */
|
2249 |
|
|
|
2250 |
|
|
tree
|
2251 |
|
|
gfc_conv_array_lbound (tree descriptor, int dim)
|
2252 |
|
|
{
|
2253 |
|
|
tree tmp;
|
2254 |
|
|
tree type;
|
2255 |
|
|
|
2256 |
|
|
type = TREE_TYPE (descriptor);
|
2257 |
|
|
|
2258 |
|
|
tmp = GFC_TYPE_ARRAY_LBOUND (type, dim);
|
2259 |
|
|
if (tmp != NULL_TREE)
|
2260 |
|
|
return tmp;
|
2261 |
|
|
|
2262 |
|
|
tmp = gfc_conv_descriptor_lbound_get (descriptor, gfc_rank_cst[dim]);
|
2263 |
|
|
return tmp;
|
2264 |
|
|
}
|
2265 |
|
|
|
2266 |
|
|
|
2267 |
|
|
/* Like gfc_conv_array_stride, but for the upper bound. */
|
2268 |
|
|
|
2269 |
|
|
tree
|
2270 |
|
|
gfc_conv_array_ubound (tree descriptor, int dim)
|
2271 |
|
|
{
|
2272 |
|
|
tree tmp;
|
2273 |
|
|
tree type;
|
2274 |
|
|
|
2275 |
|
|
type = TREE_TYPE (descriptor);
|
2276 |
|
|
|
2277 |
|
|
tmp = GFC_TYPE_ARRAY_UBOUND (type, dim);
|
2278 |
|
|
if (tmp != NULL_TREE)
|
2279 |
|
|
return tmp;
|
2280 |
|
|
|
2281 |
|
|
/* This should only ever happen when passing an assumed shape array
|
2282 |
|
|
as an actual parameter. The value will never be used. */
|
2283 |
|
|
if (GFC_ARRAY_TYPE_P (TREE_TYPE (descriptor)))
|
2284 |
|
|
return gfc_index_zero_node;
|
2285 |
|
|
|
2286 |
|
|
tmp = gfc_conv_descriptor_ubound_get (descriptor, gfc_rank_cst[dim]);
|
2287 |
|
|
return tmp;
|
2288 |
|
|
}
|
2289 |
|
|
|
2290 |
|
|
|
2291 |
|
|
/* Generate code to perform an array index bound check. */
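/* Schematically, when run-time bounds checking is enabled this emits

      if (index < lbound)                 -> runtime error
      if (check_upper && index > ubound)  -> runtime error

   and returns the evaluated index unchanged (an informal sketch of the
   gfc_trans_runtime_check calls below).  */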
|
2292 |
|
|
|
2293 |
|
|
static tree
|
2294 |
|
|
gfc_trans_array_bound_check (gfc_se * se, tree descriptor, tree index, int n,
|
2295 |
|
|
locus * where, bool check_upper)
|
2296 |
|
|
{
|
2297 |
|
|
tree fault;
|
2298 |
|
|
tree tmp_lo, tmp_up;
|
2299 |
|
|
char *msg;
|
2300 |
|
|
const char * name = NULL;
|
2301 |
|
|
|
2302 |
|
|
if (!(gfc_option.rtcheck & GFC_RTCHECK_BOUNDS))
|
2303 |
|
|
return index;
|
2304 |
|
|
|
2305 |
|
|
index = gfc_evaluate_now (index, &se->pre);
|
2306 |
|
|
|
2307 |
|
|
/* We find a name for the error message. */
|
2308 |
|
|
if (se->ss)
|
2309 |
|
|
name = se->ss->expr->symtree->name;
|
2310 |
|
|
|
2311 |
|
|
if (!name && se->loop && se->loop->ss && se->loop->ss->expr
|
2312 |
|
|
&& se->loop->ss->expr->symtree)
|
2313 |
|
|
name = se->loop->ss->expr->symtree->name;
|
2314 |
|
|
|
2315 |
|
|
if (!name && se->loop && se->loop->ss && se->loop->ss->loop_chain
|
2316 |
|
|
&& se->loop->ss->loop_chain->expr
|
2317 |
|
|
&& se->loop->ss->loop_chain->expr->symtree)
|
2318 |
|
|
name = se->loop->ss->loop_chain->expr->symtree->name;
|
2319 |
|
|
|
2320 |
|
|
if (!name && se->loop && se->loop->ss && se->loop->ss->expr)
|
2321 |
|
|
{
|
2322 |
|
|
if (se->loop->ss->expr->expr_type == EXPR_FUNCTION
|
2323 |
|
|
&& se->loop->ss->expr->value.function.name)
|
2324 |
|
|
name = se->loop->ss->expr->value.function.name;
|
2325 |
|
|
else
|
2326 |
|
|
if (se->loop->ss->type == GFC_SS_CONSTRUCTOR
|
2327 |
|
|
|| se->loop->ss->type == GFC_SS_SCALAR)
|
2328 |
|
|
name = "unnamed constant";
|
2329 |
|
|
}
|
2330 |
|
|
|
2331 |
|
|
if (TREE_CODE (descriptor) == VAR_DECL)
|
2332 |
|
|
name = IDENTIFIER_POINTER (DECL_NAME (descriptor));
|
2333 |
|
|
|
2334 |
|
|
/* If the upper bound is to be checked, include both bounds in the error message. */
|
2335 |
|
|
if (check_upper)
|
2336 |
|
|
{
|
2337 |
|
|
tmp_lo = gfc_conv_array_lbound (descriptor, n);
|
2338 |
|
|
tmp_up = gfc_conv_array_ubound (descriptor, n);
|
2339 |
|
|
|
2340 |
|
|
if (name)
|
2341 |
|
|
asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
|
2342 |
|
|
"outside of expected range (%%ld:%%ld)", n+1, name);
|
2343 |
|
|
else
|
2344 |
|
|
asprintf (&msg, "Index '%%ld' of dimension %d "
|
2345 |
|
|
"outside of expected range (%%ld:%%ld)", n+1);
|
2346 |
|
|
|
2347 |
|
|
fault = fold_build2 (LT_EXPR, boolean_type_node, index, tmp_lo);
|
2348 |
|
|
gfc_trans_runtime_check (true, false, fault, &se->pre, where, msg,
|
2349 |
|
|
fold_convert (long_integer_type_node, index),
|
2350 |
|
|
fold_convert (long_integer_type_node, tmp_lo),
|
2351 |
|
|
fold_convert (long_integer_type_node, tmp_up));
|
2352 |
|
|
fault = fold_build2 (GT_EXPR, boolean_type_node, index, tmp_up);
|
2353 |
|
|
gfc_trans_runtime_check (true, false, fault, &se->pre, where, msg,
|
2354 |
|
|
fold_convert (long_integer_type_node, index),
|
2355 |
|
|
fold_convert (long_integer_type_node, tmp_lo),
|
2356 |
|
|
fold_convert (long_integer_type_node, tmp_up));
|
2357 |
|
|
gfc_free (msg);
|
2358 |
|
|
}
|
2359 |
|
|
else
|
2360 |
|
|
{
|
2361 |
|
|
tmp_lo = gfc_conv_array_lbound (descriptor, n);
|
2362 |
|
|
|
2363 |
|
|
if (name)
|
2364 |
|
|
asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
|
2365 |
|
|
"below lower bound of %%ld", n+1, name);
|
2366 |
|
|
else
|
2367 |
|
|
asprintf (&msg, "Index '%%ld' of dimension %d "
|
2368 |
|
|
"below lower bound of %%ld", n+1);
|
2369 |
|
|
|
2370 |
|
|
fault = fold_build2 (LT_EXPR, boolean_type_node, index, tmp_lo);
|
2371 |
|
|
gfc_trans_runtime_check (true, false, fault, &se->pre, where, msg,
|
2372 |
|
|
fold_convert (long_integer_type_node, index),
|
2373 |
|
|
fold_convert (long_integer_type_node, tmp_lo));
|
2374 |
|
|
gfc_free (msg);
|
2375 |
|
|
}
|
2376 |
|
|
|
2377 |
|
|
return index;
|
2378 |
|
|
}
|
2379 |
|
|
|
2380 |
|
|
|
2381 |
|
|
/* Return the offset for an index. Performs bound checking for elemental
|
2382 |
|
|
dimensions. Single element references are processed separately. */
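/* For a scalarized DIMEN_RANGE dimension, for instance, the result is
   roughly (loopvar * section_stride + delta) * array_stride; vector
   subscripts first load the index from the vector, and elemental
   dimensions reuse the value already evaluated outside the loop (an
   informal summary of the cases below).  */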
|
2383 |
|
|
|
2384 |
|
|
static tree
|
2385 |
|
|
gfc_conv_array_index_offset (gfc_se * se, gfc_ss_info * info, int dim, int i,
|
2386 |
|
|
gfc_array_ref * ar, tree stride)
|
2387 |
|
|
{
|
2388 |
|
|
tree index;
|
2389 |
|
|
tree desc;
|
2390 |
|
|
tree data;
|
2391 |
|
|
|
2392 |
|
|
/* Get the index into the array for this dimension. */
|
2393 |
|
|
if (ar)
|
2394 |
|
|
{
|
2395 |
|
|
gcc_assert (ar->type != AR_ELEMENT);
|
2396 |
|
|
switch (ar->dimen_type[dim])
|
2397 |
|
|
{
|
2398 |
|
|
case DIMEN_ELEMENT:
|
2399 |
|
|
/* Elemental dimension. */
|
2400 |
|
|
gcc_assert (info->subscript[dim]
|
2401 |
|
|
&& info->subscript[dim]->type == GFC_SS_SCALAR);
|
2402 |
|
|
/* We've already translated this value outside the loop. */
|
2403 |
|
|
index = info->subscript[dim]->data.scalar.expr;
|
2404 |
|
|
|
2405 |
|
|
index = gfc_trans_array_bound_check (se, info->descriptor,
|
2406 |
|
|
index, dim, &ar->where,
|
2407 |
|
|
ar->as->type != AS_ASSUMED_SIZE
|
2408 |
|
|
|| dim < ar->dimen - 1);
|
2409 |
|
|
break;
|
2410 |
|
|
|
2411 |
|
|
case DIMEN_VECTOR:
|
2412 |
|
|
gcc_assert (info && se->loop);
|
2413 |
|
|
gcc_assert (info->subscript[dim]
|
2414 |
|
|
&& info->subscript[dim]->type == GFC_SS_VECTOR);
|
2415 |
|
|
desc = info->subscript[dim]->data.info.descriptor;
|
2416 |
|
|
|
2417 |
|
|
/* Get a zero-based index into the vector. */
|
2418 |
|
|
index = fold_build2 (MINUS_EXPR, gfc_array_index_type,
|
2419 |
|
|
se->loop->loopvar[i], se->loop->from[i]);
|
2420 |
|
|
|
2421 |
|
|
/* Multiply the index by the stride. */
|
2422 |
|
|
index = fold_build2 (MULT_EXPR, gfc_array_index_type,
|
2423 |
|
|
index, gfc_conv_array_stride (desc, 0));
|
2424 |
|
|
|
2425 |
|
|
/* Read the vector to get an index into info->descriptor. */
|
2426 |
|
|
data = build_fold_indirect_ref_loc (input_location,
|
2427 |
|
|
gfc_conv_array_data (desc));
|
2428 |
|
|
index = gfc_build_array_ref (data, index, NULL);
|
2429 |
|
|
index = gfc_evaluate_now (index, &se->pre);
|
2430 |
|
|
|
2431 |
|
|
/* Do any bounds checking on the final info->descriptor index. */
|
2432 |
|
|
index = gfc_trans_array_bound_check (se, info->descriptor,
|
2433 |
|
|
index, dim, &ar->where,
|
2434 |
|
|
ar->as->type != AS_ASSUMED_SIZE
|
2435 |
|
|
|| dim < ar->dimen - 1);
|
2436 |
|
|
break;
|
2437 |
|
|
|
2438 |
|
|
case DIMEN_RANGE:
|
2439 |
|
|
/* Scalarized dimension. */
|
2440 |
|
|
gcc_assert (info && se->loop);
|
2441 |
|
|
|
2442 |
|
|
/* Multiply the loop variable by the stride and delta. */
|
2443 |
|
|
index = se->loop->loopvar[i];
|
2444 |
|
|
if (!integer_onep (info->stride[i]))
|
2445 |
|
|
index = fold_build2 (MULT_EXPR, gfc_array_index_type, index,
|
2446 |
|
|
info->stride[i]);
|
2447 |
|
|
if (!integer_zerop (info->delta[i]))
|
2448 |
|
|
index = fold_build2 (PLUS_EXPR, gfc_array_index_type, index,
|
2449 |
|
|
info->delta[i]);
|
2450 |
|
|
break;
|
2451 |
|
|
|
2452 |
|
|
default:
|
2453 |
|
|
gcc_unreachable ();
|
2454 |
|
|
}
|
2455 |
|
|
}
|
2456 |
|
|
else
|
2457 |
|
|
{
|
2458 |
|
|
/* Temporary array or derived type component. */
|
2459 |
|
|
gcc_assert (se->loop);
|
2460 |
|
|
index = se->loop->loopvar[se->loop->order[i]];
|
2461 |
|
|
if (!integer_zerop (info->delta[i]))
|
2462 |
|
|
index = fold_build2 (PLUS_EXPR, gfc_array_index_type,
|
2463 |
|
|
index, info->delta[i]);
|
2464 |
|
|
}
|
2465 |
|
|
|
2466 |
|
|
/* Multiply by the stride. */
|
2467 |
|
|
if (!integer_onep (stride))
|
2468 |
|
|
index = fold_build2 (MULT_EXPR, gfc_array_index_type, index, stride);
|
2469 |
|
|
|
2470 |
|
|
return index;
|
2471 |
|
|
}
|
2472 |
|
|
|
2473 |
|
|
|
2474 |
|
|
/* Build a scalarized reference to an array. */
|
2475 |
|
|
|
2476 |
|
|
static void
|
2477 |
|
|
gfc_conv_scalarized_array_ref (gfc_se * se, gfc_array_ref * ar)
|
2478 |
|
|
{
|
2479 |
|
|
gfc_ss_info *info;
|
2480 |
|
|
tree decl = NULL_TREE;
|
2481 |
|
|
tree index;
|
2482 |
|
|
tree tmp;
|
2483 |
|
|
int n;
|
2484 |
|
|
|
2485 |
|
|
info = &se->ss->data.info;
|
2486 |
|
|
if (ar)
|
2487 |
|
|
n = se->loop->order[0];
|
2488 |
|
|
else
|
2489 |
|
|
n = 0;
|
2490 |
|
|
|
2491 |
|
|
index = gfc_conv_array_index_offset (se, info, info->dim[n], n, ar,
|
2492 |
|
|
info->stride0);
|
2493 |
|
|
/* Add the offset for this dimension to the stored offset for all other
|
2494 |
|
|
dimensions. */
|
2495 |
|
|
if (!integer_zerop (info->offset))
|
2496 |
|
|
index = fold_build2 (PLUS_EXPR, gfc_array_index_type, index, info->offset);
|
2497 |
|
|
|
2498 |
|
|
if (se->ss->expr && is_subref_array (se->ss->expr))
|
2499 |
|
|
decl = se->ss->expr->symtree->n.sym->backend_decl;
|
2500 |
|
|
|
2501 |
|
|
tmp = build_fold_indirect_ref_loc (input_location,
|
2502 |
|
|
info->data);
|
2503 |
|
|
se->expr = gfc_build_array_ref (tmp, index, decl);
|
2504 |
|
|
}
|
2505 |
|
|
|
2506 |
|
|
|
2507 |
|
|
/* Translate access of temporary array. */
|
2508 |
|
|
|
2509 |
|
|
void
|
2510 |
|
|
gfc_conv_tmp_array_ref (gfc_se * se)
|
2511 |
|
|
{
|
2512 |
|
|
se->string_length = se->ss->string_length;
|
2513 |
|
|
gfc_conv_scalarized_array_ref (se, NULL);
|
2514 |
|
|
}
|
2515 |
|
|
|
2516 |
|
|
|
2517 |
|
|
/* Build an array reference. se->expr already holds the array descriptor.
|
2518 |
|
|
This should be either a variable, indirect variable reference or component
|
2519 |
|
|
reference. For arrays which do not have a descriptor, se->expr will be
|
2520 |
|
|
the data pointer.
|
2521 |
|
|
a(i, j, k) = base[offset + i * stride[0] + j * stride[1] + k * stride[2]]  */
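/* For illustration (a typical descriptor layout is assumed): with a
   declared as a(1:10, 1:20) the strides are 1 and 10 and the offset is
   -(1*1 + 1*10) = -11, so a(i, j) maps to base[-11 + i*1 + j*10] and
   a(1, 1) is base[0].  */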
|
2522 |
|
|
|
2523 |
|
|
void
|
2524 |
|
|
gfc_conv_array_ref (gfc_se * se, gfc_array_ref * ar, gfc_symbol * sym,
|
2525 |
|
|
locus * where)
|
2526 |
|
|
{
|
2527 |
|
|
int n;
|
2528 |
|
|
tree index;
|
2529 |
|
|
tree tmp;
|
2530 |
|
|
tree stride;
|
2531 |
|
|
gfc_se indexse;
|
2532 |
|
|
gfc_se tmpse;
|
2533 |
|
|
|
2534 |
|
|
/* Handle scalarized references separately. */
|
2535 |
|
|
if (ar->type != AR_ELEMENT)
|
2536 |
|
|
{
|
2537 |
|
|
gfc_conv_scalarized_array_ref (se, ar);
|
2538 |
|
|
gfc_advance_se_ss_chain (se);
|
2539 |
|
|
return;
|
2540 |
|
|
}
|
2541 |
|
|
|
2542 |
|
|
index = gfc_index_zero_node;
|
2543 |
|
|
|
2544 |
|
|
/* Calculate the offsets from all the dimensions. */
|
2545 |
|
|
for (n = 0; n < ar->dimen; n++)
|
2546 |
|
|
{
|
2547 |
|
|
/* Calculate the index for this dimension. */
|
2548 |
|
|
gfc_init_se (&indexse, se);
|
2549 |
|
|
gfc_conv_expr_type (&indexse, ar->start[n], gfc_array_index_type);
|
2550 |
|
|
gfc_add_block_to_block (&se->pre, &indexse.pre);
|
2551 |
|
|
|
2552 |
|
|
if (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
|
2553 |
|
|
{
|
2554 |
|
|
/* Check array bounds. */
|
2555 |
|
|
tree cond;
|
2556 |
|
|
char *msg;
|
2557 |
|
|
|
2558 |
|
|
/* Evaluate the indexse.expr only once. */
|
2559 |
|
|
indexse.expr = save_expr (indexse.expr);
|
2560 |
|
|
|
2561 |
|
|
/* Lower bound. */
|
2562 |
|
|
tmp = gfc_conv_array_lbound (se->expr, n);
|
2563 |
|
|
if (sym->attr.temporary)
|
2564 |
|
|
{
|
2565 |
|
|
gfc_init_se (&tmpse, se);
|
2566 |
|
|
gfc_conv_expr_type (&tmpse, ar->as->lower[n],
|
2567 |
|
|
gfc_array_index_type);
|
2568 |
|
|
gfc_add_block_to_block (&se->pre, &tmpse.pre);
|
2569 |
|
|
tmp = tmpse.expr;
|
2570 |
|
|
}
|
2571 |
|
|
|
2572 |
|
|
cond = fold_build2 (LT_EXPR, boolean_type_node,
|
2573 |
|
|
indexse.expr, tmp);
|
2574 |
|
|
asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
|
2575 |
|
|
"below lower bound of %%ld", n+1, sym->name);
|
2576 |
|
|
gfc_trans_runtime_check (true, false, cond, &se->pre, where, msg,
|
2577 |
|
|
fold_convert (long_integer_type_node,
|
2578 |
|
|
indexse.expr),
|
2579 |
|
|
fold_convert (long_integer_type_node, tmp));
|
2580 |
|
|
gfc_free (msg);
|
2581 |
|
|
|
2582 |
|
|
/* Upper bound, but not for the last dimension of assumed-size
|
2583 |
|
|
arrays. */
|
2584 |
|
|
if (n < ar->dimen - 1 || ar->as->type != AS_ASSUMED_SIZE)
|
2585 |
|
|
{
|
2586 |
|
|
tmp = gfc_conv_array_ubound (se->expr, n);
|
2587 |
|
|
if (sym->attr.temporary)
|
2588 |
|
|
{
|
2589 |
|
|
gfc_init_se (&tmpse, se);
|
2590 |
|
|
gfc_conv_expr_type (&tmpse, ar->as->upper[n],
|
2591 |
|
|
gfc_array_index_type);
|
2592 |
|
|
gfc_add_block_to_block (&se->pre, &tmpse.pre);
|
2593 |
|
|
tmp = tmpse.expr;
|
2594 |
|
|
}
|
2595 |
|
|
|
2596 |
|
|
cond = fold_build2 (GT_EXPR, boolean_type_node,
|
2597 |
|
|
indexse.expr, tmp);
|
2598 |
|
|
asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
|
2599 |
|
|
"above upper bound of %%ld", n+1, sym->name);
|
2600 |
|
|
gfc_trans_runtime_check (true, false, cond, &se->pre, where, msg,
|
2601 |
|
|
fold_convert (long_integer_type_node,
|
2602 |
|
|
indexse.expr),
|
2603 |
|
|
fold_convert (long_integer_type_node, tmp));
|
2604 |
|
|
gfc_free (msg);
|
2605 |
|
|
}
|
2606 |
|
|
}
|
2607 |
|
|
|
2608 |
|
|
/* Multiply the index by the stride. */
|
2609 |
|
|
stride = gfc_conv_array_stride (se->expr, n);
|
2610 |
|
|
tmp = fold_build2 (MULT_EXPR, gfc_array_index_type, indexse.expr,
|
2611 |
|
|
stride);
|
2612 |
|
|
|
2613 |
|
|
/* And add it to the total. */
|
2614 |
|
|
index = fold_build2 (PLUS_EXPR, gfc_array_index_type, index, tmp);
|
2615 |
|
|
}
|
2616 |
|
|
|
2617 |
|
|
tmp = gfc_conv_array_offset (se->expr);
|
2618 |
|
|
if (!integer_zerop (tmp))
|
2619 |
|
|
index = fold_build2 (PLUS_EXPR, gfc_array_index_type, index, tmp);
|
2620 |
|
|
|
2621 |
|
|
/* Access the calculated element. */
|
2622 |
|
|
tmp = gfc_conv_array_data (se->expr);
|
2623 |
|
|
tmp = build_fold_indirect_ref (tmp);
|
2624 |
|
|
se->expr = gfc_build_array_ref (tmp, index, sym->backend_decl);
|
2625 |
|
|
}
|
2626 |
|
|
|
2627 |
|
|
|
2628 |
|
|
/* Generate the code to be executed immediately before entering a
|
2629 |
|
|
scalarization loop. */
|
2630 |
|
|
|
2631 |
|
|
static void
|
2632 |
|
|
gfc_trans_preloop_setup (gfc_loopinfo * loop, int dim, int flag,
|
2633 |
|
|
stmtblock_t * pblock)
|
2634 |
|
|
{
|
2635 |
|
|
tree index;
|
2636 |
|
|
tree stride;
|
2637 |
|
|
gfc_ss_info *info;
|
2638 |
|
|
gfc_ss *ss;
|
2639 |
|
|
gfc_se se;
|
2640 |
|
|
int i;
|
2641 |
|
|
|
2642 |
|
|
/* This code will be executed before entering the scalarization loop
|
2643 |
|
|
for this dimension. */
|
2644 |
|
|
for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
|
2645 |
|
|
{
|
2646 |
|
|
if ((ss->useflags & flag) == 0)
|
2647 |
|
|
continue;
|
2648 |
|
|
|
2649 |
|
|
if (ss->type != GFC_SS_SECTION
|
2650 |
|
|
&& ss->type != GFC_SS_FUNCTION && ss->type != GFC_SS_CONSTRUCTOR
|
2651 |
|
|
&& ss->type != GFC_SS_COMPONENT)
|
2652 |
|
|
continue;
|
2653 |
|
|
|
2654 |
|
|
info = &ss->data.info;
|
2655 |
|
|
|
2656 |
|
|
if (dim >= info->dimen)
|
2657 |
|
|
continue;
|
2658 |
|
|
|
2659 |
|
|
if (dim == info->dimen - 1)
|
2660 |
|
|
{
|
2661 |
|
|
/* For the outermost loop calculate the offset due to any
|
2662 |
|
|
elemental dimensions. It will have been initialized with the
|
2663 |
|
|
base offset of the array. */
|
2664 |
|
|
if (info->ref)
|
2665 |
|
|
{
|
2666 |
|
|
for (i = 0; i < info->ref->u.ar.dimen; i++)
|
2667 |
|
|
{
|
2668 |
|
|
if (info->ref->u.ar.dimen_type[i] != DIMEN_ELEMENT)
|
2669 |
|
|
continue;
|
2670 |
|
|
|
2671 |
|
|
gfc_init_se (&se, NULL);
|
2672 |
|
|
se.loop = loop;
|
2673 |
|
|
se.expr = info->descriptor;
|
2674 |
|
|
stride = gfc_conv_array_stride (info->descriptor, i);
|
2675 |
|
|
index = gfc_conv_array_index_offset (&se, info, i, -1,
|
2676 |
|
|
&info->ref->u.ar,
|
2677 |
|
|
stride);
|
2678 |
|
|
gfc_add_block_to_block (pblock, &se.pre);
|
2679 |
|
|
|
2680 |
|
|
info->offset = fold_build2 (PLUS_EXPR, gfc_array_index_type,
|
2681 |
|
|
info->offset, index);
|
2682 |
|
|
info->offset = gfc_evaluate_now (info->offset, pblock);
|
2683 |
|
|
}
|
2684 |
|
|
|
2685 |
|
|
i = loop->order[0];
|
2686 |
|
|
stride = gfc_conv_array_stride (info->descriptor, info->dim[i]);
|
2687 |
|
|
}
|
2688 |
|
|
else
|
2689 |
|
|
stride = gfc_conv_array_stride (info->descriptor, 0);
|
2690 |
|
|
|
2691 |
|
|
/* Calculate the stride of the innermost loop. Hopefully this will
|
2692 |
|
|
allow the backend optimizers to work more effectively.
|
2693 |
|
|
*/
|
2694 |
|
|
info->stride0 = gfc_evaluate_now (stride, pblock);
|
2695 |
|
|
}
|
2696 |
|
|
else
|
2697 |
|
|
{
|
2698 |
|
|
/* Add the offset for the previous loop dimension. */
|
2699 |
|
|
gfc_array_ref *ar;
|
2700 |
|
|
|
2701 |
|
|
if (info->ref)
|
2702 |
|
|
{
|
2703 |
|
|
ar = &info->ref->u.ar;
|
2704 |
|
|
i = loop->order[dim + 1];
|
2705 |
|
|
}
|
2706 |
|
|
else
|
2707 |
|
|
{
|
2708 |
|
|
ar = NULL;
|
2709 |
|
|
i = dim + 1;
|
2710 |
|
|
}
|
2711 |
|
|
|
2712 |
|
|
gfc_init_se (&se, NULL);
|
2713 |
|
|
se.loop = loop;
|
2714 |
|
|
se.expr = info->descriptor;
|
2715 |
|
|
stride = gfc_conv_array_stride (info->descriptor, info->dim[i]);
|
2716 |
|
|
index = gfc_conv_array_index_offset (&se, info, info->dim[i], i,
|
2717 |
|
|
ar, stride);
|
2718 |
|
|
gfc_add_block_to_block (pblock, &se.pre);
|
2719 |
|
|
info->offset = fold_build2 (PLUS_EXPR, gfc_array_index_type,
|
2720 |
|
|
info->offset, index);
|
2721 |
|
|
info->offset = gfc_evaluate_now (info->offset, pblock);
|
2722 |
|
|
}
|
2723 |
|
|
|
2724 |
|
|
/* Remember this offset for the second loop. */
|
2725 |
|
|
if (dim == loop->temp_dim - 1)
|
2726 |
|
|
info->saved_offset = info->offset;
|
2727 |
|
|
}
|
2728 |
|
|
}
|
2729 |
|
|
|
2730 |
|
|
|
2731 |
|
|
/* Start a scalarized expression. Creates a scope and declares loop
|
2732 |
|
|
variables. */
|
2733 |
|
|
|
2734 |
|
|
void
|
2735 |
|
|
gfc_start_scalarized_body (gfc_loopinfo * loop, stmtblock_t * pbody)
|
2736 |
|
|
{
|
2737 |
|
|
int dim;
|
2738 |
|
|
int n;
|
2739 |
|
|
int flags;
|
2740 |
|
|
|
2741 |
|
|
gcc_assert (!loop->array_parameter);
|
2742 |
|
|
|
2743 |
|
|
for (dim = loop->dimen - 1; dim >= 0; dim--)
|
2744 |
|
|
{
|
2745 |
|
|
n = loop->order[dim];
|
2746 |
|
|
|
2747 |
|
|
gfc_start_block (&loop->code[n]);
|
2748 |
|
|
|
2749 |
|
|
/* Create the loop variable. */
|
2750 |
|
|
loop->loopvar[n] = gfc_create_var (gfc_array_index_type, "S");
|
2751 |
|
|
|
2752 |
|
|
if (dim < loop->temp_dim)
|
2753 |
|
|
flags = 3;
|
2754 |
|
|
else
|
2755 |
|
|
flags = 1;
|
2756 |
|
|
/* Calculate values that will be constant within this loop. */
|
2757 |
|
|
gfc_trans_preloop_setup (loop, dim, flags, &loop->code[n]);
|
2758 |
|
|
}
|
2759 |
|
|
gfc_start_block (pbody);
|
2760 |
|
|
}
|
2761 |
|
|
|
2762 |
|
|
|
2763 |
|
|
/* Generates the actual loop code for a scalarization loop. */
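/* In the ordinary (non-workshare) case the generated structure is roughly

      S = from;
   loop:
      if (S > to) goto exit;
      <body>
      S = S + 1;
      goto loop;
   exit:

   while the OpenMP workshare path below instead wraps the outermost loop
   in an OMP_FOR with static scheduling (an informal sketch).  */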
|
2764 |
|
|
|
2765 |
|
|
void
|
2766 |
|
|
gfc_trans_scalarized_loop_end (gfc_loopinfo * loop, int n,
|
2767 |
|
|
stmtblock_t * pbody)
|
2768 |
|
|
{
|
2769 |
|
|
stmtblock_t block;
|
2770 |
|
|
tree cond;
|
2771 |
|
|
tree tmp;
|
2772 |
|
|
tree loopbody;
|
2773 |
|
|
tree exit_label;
|
2774 |
|
|
tree stmt;
|
2775 |
|
|
tree init;
|
2776 |
|
|
tree incr;
|
2777 |
|
|
|
2778 |
|
|
if ((ompws_flags & (OMPWS_WORKSHARE_FLAG | OMPWS_SCALARIZER_WS))
|
2779 |
|
|
== (OMPWS_WORKSHARE_FLAG | OMPWS_SCALARIZER_WS)
|
2780 |
|
|
&& n == loop->dimen - 1)
|
2781 |
|
|
{
|
2782 |
|
|
/* We create an OMP_FOR construct for the outermost scalarized loop. */
|
2783 |
|
|
init = make_tree_vec (1);
|
2784 |
|
|
cond = make_tree_vec (1);
|
2785 |
|
|
incr = make_tree_vec (1);
|
2786 |
|
|
|
2787 |
|
|
/* Cycle statement is implemented with a goto. Exit statement must not
|
2788 |
|
|
be present for this loop. */
|
2789 |
|
|
exit_label = gfc_build_label_decl (NULL_TREE);
|
2790 |
|
|
TREE_USED (exit_label) = 1;
|
2791 |
|
|
|
2792 |
|
|
/* Label for cycle statements (if needed). */
|
2793 |
|
|
tmp = build1_v (LABEL_EXPR, exit_label);
|
2794 |
|
|
gfc_add_expr_to_block (pbody, tmp);
|
2795 |
|
|
|
2796 |
|
|
stmt = make_node (OMP_FOR);
|
2797 |
|
|
|
2798 |
|
|
TREE_TYPE (stmt) = void_type_node;
|
2799 |
|
|
OMP_FOR_BODY (stmt) = loopbody = gfc_finish_block (pbody);
|
2800 |
|
|
|
2801 |
|
|
OMP_FOR_CLAUSES (stmt) = build_omp_clause (input_location,
|
2802 |
|
|
OMP_CLAUSE_SCHEDULE);
|
2803 |
|
|
OMP_CLAUSE_SCHEDULE_KIND (OMP_FOR_CLAUSES (stmt))
|
2804 |
|
|
= OMP_CLAUSE_SCHEDULE_STATIC;
|
2805 |
|
|
if (ompws_flags & OMPWS_NOWAIT)
|
2806 |
|
|
OMP_CLAUSE_CHAIN (OMP_FOR_CLAUSES (stmt))
|
2807 |
|
|
= build_omp_clause (input_location, OMP_CLAUSE_NOWAIT);
|
2808 |
|
|
|
2809 |
|
|
/* Initialize the loopvar. */
|
2810 |
|
|
TREE_VEC_ELT (init, 0) = build2_v (MODIFY_EXPR, loop->loopvar[n],
|
2811 |
|
|
loop->from[n]);
|
2812 |
|
|
OMP_FOR_INIT (stmt) = init;
|
2813 |
|
|
/* The exit condition. */
|
2814 |
|
|
TREE_VEC_ELT (cond, 0) = build2 (LE_EXPR, boolean_type_node,
|
2815 |
|
|
loop->loopvar[n], loop->to[n]);
|
2816 |
|
|
OMP_FOR_COND (stmt) = cond;
|
2817 |
|
|
/* Increment the loopvar. */
|
2818 |
|
|
tmp = build2 (PLUS_EXPR, gfc_array_index_type,
|
2819 |
|
|
loop->loopvar[n], gfc_index_one_node);
|
2820 |
|
|
TREE_VEC_ELT (incr, 0) = fold_build2 (MODIFY_EXPR,
|
2821 |
|
|
void_type_node, loop->loopvar[n], tmp);
|
2822 |
|
|
OMP_FOR_INCR (stmt) = incr;
|
2823 |
|
|
|
2824 |
|
|
ompws_flags &= ~OMPWS_CURR_SINGLEUNIT;
|
2825 |
|
|
gfc_add_expr_to_block (&loop->code[n], stmt);
|
2826 |
|
|
}
|
2827 |
|
|
else
|
2828 |
|
|
{
|
2829 |
|
|
loopbody = gfc_finish_block (pbody);
|
2830 |
|
|
|
2831 |
|
|
/* Initialize the loopvar. */
|
2832 |
|
|
if (loop->loopvar[n] != loop->from[n])
|
2833 |
|
|
gfc_add_modify (&loop->code[n], loop->loopvar[n], loop->from[n]);
|
2834 |
|
|
|
2835 |
|
|
exit_label = gfc_build_label_decl (NULL_TREE);
|
2836 |
|
|
|
2837 |
|
|
/* Generate the loop body. */
|
2838 |
|
|
gfc_init_block (&block);
|
2839 |
|
|
|
2840 |
|
|
/* The exit condition. */
|
2841 |
|
|
cond = fold_build2 (GT_EXPR, boolean_type_node,
|
2842 |
|
|
loop->loopvar[n], loop->to[n]);
|
2843 |
|
|
tmp = build1_v (GOTO_EXPR, exit_label);
|
2844 |
|
|
TREE_USED (exit_label) = 1;
|
2845 |
|
|
tmp = build3_v (COND_EXPR, cond, tmp, build_empty_stmt (input_location));
|
2846 |
|
|
gfc_add_expr_to_block (&block, tmp);
|
2847 |
|
|
|
2848 |
|
|
/* The main body. */
|
2849 |
|
|
gfc_add_expr_to_block (&block, loopbody);
|
2850 |
|
|
|
2851 |
|
|
/* Increment the loopvar. */
|
2852 |
|
|
tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
|
2853 |
|
|
loop->loopvar[n], gfc_index_one_node);
|
2854 |
|
|
gfc_add_modify (&block, loop->loopvar[n], tmp);
|
2855 |
|
|
|
2856 |
|
|
/* Build the loop. */
|
2857 |
|
|
tmp = gfc_finish_block (&block);
|
2858 |
|
|
tmp = build1_v (LOOP_EXPR, tmp);
|
2859 |
|
|
gfc_add_expr_to_block (&loop->code[n], tmp);
|
2860 |
|
|
|
2861 |
|
|
/* Add the exit label. */
|
2862 |
|
|
tmp = build1_v (LABEL_EXPR, exit_label);
|
2863 |
|
|
gfc_add_expr_to_block (&loop->code[n], tmp);
|
2864 |
|
|
}
|
2865 |
|
|
|
2866 |
|
|
}
|
2867 |
|
|
|
2868 |
|
|
|
2869 |
|
|
/* Finishes and generates the loops for a scalarized expression. */
|
2870 |
|
|
|
2871 |
|
|
void
|
2872 |
|
|
gfc_trans_scalarizing_loops (gfc_loopinfo * loop, stmtblock_t * body)
|
2873 |
|
|
{
|
2874 |
|
|
int dim;
|
2875 |
|
|
int n;
|
2876 |
|
|
gfc_ss *ss;
|
2877 |
|
|
stmtblock_t *pblock;
|
2878 |
|
|
tree tmp;
|
2879 |
|
|
|
2880 |
|
|
pblock = body;
|
2881 |
|
|
/* Generate the loops. */
|
2882 |
|
|
for (dim = 0; dim < loop->dimen; dim++)
|
2883 |
|
|
{
|
2884 |
|
|
n = loop->order[dim];
|
2885 |
|
|
gfc_trans_scalarized_loop_end (loop, n, pblock);
|
2886 |
|
|
loop->loopvar[n] = NULL_TREE;
|
2887 |
|
|
pblock = &loop->code[n];
|
2888 |
|
|
}
|
2889 |
|
|
|
2890 |
|
|
tmp = gfc_finish_block (pblock);
|
2891 |
|
|
gfc_add_expr_to_block (&loop->pre, tmp);
|
2892 |
|
|
|
2893 |
|
|
/* Clear all the used flags. */
|
2894 |
|
|
for (ss = loop->ss; ss; ss = ss->loop_chain)
|
2895 |
|
|
ss->useflags = 0;
|
2896 |
|
|
}
|
2897 |
|
|
|
2898 |
|
|
|
2899 |
|
|
/* Finish the main body of a scalarized expression, and start the secondary
|
2900 |
|
|
copying body. */
|
2901 |
|
|
|
2902 |
|
|
void
|
2903 |
|
|
gfc_trans_scalarized_loop_boundary (gfc_loopinfo * loop, stmtblock_t * body)
|
2904 |
|
|
{
|
2905 |
|
|
int dim;
|
2906 |
|
|
int n;
|
2907 |
|
|
stmtblock_t *pblock;
|
2908 |
|
|
gfc_ss *ss;
|
2909 |
|
|
|
2910 |
|
|
pblock = body;
|
2911 |
|
|
/* We finish as many loops as are used by the temporary. */
|
2912 |
|
|
for (dim = 0; dim < loop->temp_dim - 1; dim++)
|
2913 |
|
|
{
|
2914 |
|
|
n = loop->order[dim];
|
2915 |
|
|
gfc_trans_scalarized_loop_end (loop, n, pblock);
|
2916 |
|
|
loop->loopvar[n] = NULL_TREE;
|
2917 |
|
|
pblock = &loop->code[n];
|
2918 |
|
|
}
|
2919 |
|
|
|
2920 |
|
|
/* We don't want to finish the outermost loop entirely. */
|
2921 |
|
|
n = loop->order[loop->temp_dim - 1];
|
2922 |
|
|
gfc_trans_scalarized_loop_end (loop, n, pblock);
|
2923 |
|
|
|
2924 |
|
|
/* Restore the initial offsets. */
|
2925 |
|
|
for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
|
2926 |
|
|
{
|
2927 |
|
|
if ((ss->useflags & 2) == 0)
|
2928 |
|
|
continue;
|
2929 |
|
|
|
2930 |
|
|
if (ss->type != GFC_SS_SECTION
|
2931 |
|
|
&& ss->type != GFC_SS_FUNCTION && ss->type != GFC_SS_CONSTRUCTOR
|
2932 |
|
|
&& ss->type != GFC_SS_COMPONENT)
|
2933 |
|
|
continue;
|
2934 |
|
|
|
2935 |
|
|
ss->data.info.offset = ss->data.info.saved_offset;
|
2936 |
|
|
}
|
2937 |
|
|
|
2938 |
|
|
/* Restart all the inner loops we just finished. */
|
2939 |
|
|
for (dim = loop->temp_dim - 2; dim >= 0; dim--)
|
2940 |
|
|
{
|
2941 |
|
|
n = loop->order[dim];
|
2942 |
|
|
|
2943 |
|
|
gfc_start_block (&loop->code[n]);
|
2944 |
|
|
|
2945 |
|
|
loop->loopvar[n] = gfc_create_var (gfc_array_index_type, "Q");
|
2946 |
|
|
|
2947 |
|
|
gfc_trans_preloop_setup (loop, dim, 2, &loop->code[n]);
|
2948 |
|
|
}
|
2949 |
|
|
|
2950 |
|
|
/* Start a block for the secondary copying code. */
|
2951 |
|
|
gfc_start_block (body);
|
2952 |
|
|
}
|
2953 |
|
|
|
2954 |
|
|
|
2955 |
|
|
/* Calculate the upper bound of an array section. */
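/* For a section such as a(1:n:2) this is the user-supplied end expression
   (n); if the end is omitted, as in a(5:), the array's own upper bound is
   used, and for vector subscripts NULL is returned because the bound is
   obtained later from the vector's descriptor (illustrative summary).  */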
|
2956 |
|
|
|
2957 |
|
|
static tree
|
2958 |
|
|
gfc_conv_section_upper_bound (gfc_ss * ss, int n, stmtblock_t * pblock)
|
2959 |
|
|
{
|
2960 |
|
|
int dim;
|
2961 |
|
|
gfc_expr *end;
|
2962 |
|
|
tree desc;
|
2963 |
|
|
tree bound;
|
2964 |
|
|
gfc_se se;
|
2965 |
|
|
gfc_ss_info *info;
|
2966 |
|
|
|
2967 |
|
|
gcc_assert (ss->type == GFC_SS_SECTION);
|
2968 |
|
|
|
2969 |
|
|
info = &ss->data.info;
|
2970 |
|
|
dim = info->dim[n];
|
2971 |
|
|
|
2972 |
|
|
if (info->ref->u.ar.dimen_type[dim] == DIMEN_VECTOR)
|
2973 |
|
|
/* We'll calculate the upper bound once we have access to the
|
2974 |
|
|
vector's descriptor. */
|
2975 |
|
|
return NULL;
|
2976 |
|
|
|
2977 |
|
|
gcc_assert (info->ref->u.ar.dimen_type[dim] == DIMEN_RANGE);
|
2978 |
|
|
desc = info->descriptor;
|
2979 |
|
|
end = info->ref->u.ar.end[dim];
|
2980 |
|
|
|
2981 |
|
|
if (end)
|
2982 |
|
|
{
|
2983 |
|
|
/* The upper bound was specified. */
|
2984 |
|
|
gfc_init_se (&se, NULL);
|
2985 |
|
|
gfc_conv_expr_type (&se, end, gfc_array_index_type);
|
2986 |
|
|
gfc_add_block_to_block (pblock, &se.pre);
|
2987 |
|
|
bound = se.expr;
|
2988 |
|
|
}
|
2989 |
|
|
else
|
2990 |
|
|
{
|
2991 |
|
|
/* No upper bound was specified, so use the bound of the array. */
|
2992 |
|
|
bound = gfc_conv_array_ubound (desc, dim);
|
2993 |
|
|
}
|
2994 |
|
|
|
2995 |
|
|
return bound;
|
2996 |
|
|
}
|
2997 |
|
|
|
2998 |
|
|
|
2999 |
|
|
/* Calculate the start, end and stride of an array section.  */

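/* For example, for the section A(3:N:2) the start is 3, the end is N and the
   stride is 2; when one of them is omitted, as in A(:N) or A(3:), the
   corresponding array bound (or a stride of one) is used instead.  A vector
   subscript such as A(V) is accessed through a zero-based index into V
   instead, so no range needs to be computed for it here.  */
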
static void
gfc_conv_section_startstride (gfc_loopinfo * loop, gfc_ss * ss, int n)
{
  gfc_expr *start;
  gfc_expr *end;
  gfc_expr *stride;
  tree desc;
  gfc_se se;
  gfc_ss_info *info;
  int dim;

  gcc_assert (ss->type == GFC_SS_SECTION);

  info = &ss->data.info;
  dim = info->dim[n];

  if (info->ref->u.ar.dimen_type[dim] == DIMEN_VECTOR)
    {
      /* We use a zero-based index to access the vector.  */
      info->start[n] = gfc_index_zero_node;
      info->end[n] = gfc_index_zero_node;
      info->stride[n] = gfc_index_one_node;
      return;
    }

  gcc_assert (info->ref->u.ar.dimen_type[dim] == DIMEN_RANGE);
  desc = info->descriptor;
  start = info->ref->u.ar.start[dim];
  end = info->ref->u.ar.end[dim];
  stride = info->ref->u.ar.stride[dim];

  /* Calculate the start of the range.  For vector subscripts this will
     be the range of the vector.  */
  if (start)
    {
      /* Specified section start.  */
      gfc_init_se (&se, NULL);
      gfc_conv_expr_type (&se, start, gfc_array_index_type);
      gfc_add_block_to_block (&loop->pre, &se.pre);
      info->start[n] = se.expr;
    }
  else
    {
      /* No lower bound specified so use the bound of the array.  */
      info->start[n] = gfc_conv_array_lbound (desc, dim);
    }
  info->start[n] = gfc_evaluate_now (info->start[n], &loop->pre);

  /* Similarly calculate the end.  Although this is not used in the
     scalarizer, it is needed when checking bounds and where the end
     is an expression with side-effects.  */
  if (end)
    {
      /* Specified section end.  */
      gfc_init_se (&se, NULL);
      gfc_conv_expr_type (&se, end, gfc_array_index_type);
      gfc_add_block_to_block (&loop->pre, &se.pre);
      info->end[n] = se.expr;
    }
  else
    {
      /* No upper bound specified so use the bound of the array.  */
      info->end[n] = gfc_conv_array_ubound (desc, dim);
    }
  info->end[n] = gfc_evaluate_now (info->end[n], &loop->pre);

  /* Calculate the stride.  */
  if (stride == NULL)
    info->stride[n] = gfc_index_one_node;
  else
    {
      gfc_init_se (&se, NULL);
      gfc_conv_expr_type (&se, stride, gfc_array_index_type);
      gfc_add_block_to_block (&loop->pre, &se.pre);
      info->stride[n] = gfc_evaluate_now (se.expr, &loop->pre);
    }
}

/* Calculates the range start and stride for a SS chain.  Also gets the
   descriptor and data pointer.  The range of vector subscripts is the size
   of the vector.  Array bounds are also checked.  */

void
gfc_conv_ss_startstride (gfc_loopinfo * loop)
{
  int n;
  tree tmp;
  gfc_ss *ss;
  tree desc;

  loop->dimen = 0;
  /* Determine the rank of the loop.  */
  for (ss = loop->ss;
       ss != gfc_ss_terminator && loop->dimen == 0; ss = ss->loop_chain)
    {
      switch (ss->type)
        {
        case GFC_SS_SECTION:
        case GFC_SS_CONSTRUCTOR:
        case GFC_SS_FUNCTION:
        case GFC_SS_COMPONENT:
          loop->dimen = ss->data.info.dimen;
          break;

        /* As usual, lbound and ubound are exceptions!  */
        case GFC_SS_INTRINSIC:
          switch (ss->expr->value.function.isym->id)
            {
            case GFC_ISYM_LBOUND:
            case GFC_ISYM_UBOUND:
              loop->dimen = ss->data.info.dimen;

            default:
              break;
            }

        default:
          break;
        }
    }

  /* We should have determined the rank of the expression by now.  If
     not, that's bad news.  */
  gcc_assert (loop->dimen != 0);

  /* Loop over all the SS in the chain.  */
  for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
    {
      if (ss->expr && ss->expr->shape && !ss->shape)
        ss->shape = ss->expr->shape;

      switch (ss->type)
        {
        case GFC_SS_SECTION:
          /* Get the descriptor for the array.  */
          gfc_conv_ss_descriptor (&loop->pre, ss, !loop->array_parameter);

          for (n = 0; n < ss->data.info.dimen; n++)
            gfc_conv_section_startstride (loop, ss, n);
          break;

        case GFC_SS_INTRINSIC:
          switch (ss->expr->value.function.isym->id)
            {
            /* Fall through to supply start and stride.  */
            case GFC_ISYM_LBOUND:
            case GFC_ISYM_UBOUND:
              break;
            default:
              continue;
            }

        case GFC_SS_CONSTRUCTOR:
        case GFC_SS_FUNCTION:
          for (n = 0; n < ss->data.info.dimen; n++)
            {
              ss->data.info.start[n] = gfc_index_zero_node;
              ss->data.info.end[n] = gfc_index_zero_node;
              ss->data.info.stride[n] = gfc_index_one_node;
            }
          break;

        default:
          break;
        }
    }

  /* The rest is just runtime bound checking.  */
  if (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
    {
      stmtblock_t block;
      tree lbound, ubound;
      tree end;
      tree size[GFC_MAX_DIMENSIONS];
      tree stride_pos, stride_neg, non_zerosized, tmp2, tmp3;
      gfc_ss_info *info;
      char *msg;
      int dim;

      gfc_start_block (&block);

      for (n = 0; n < loop->dimen; n++)
        size[n] = NULL_TREE;

      for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
        {
          stmtblock_t inner;

          if (ss->type != GFC_SS_SECTION)
            continue;

          gfc_start_block (&inner);

          /* TODO: range checking for mapped dimensions.  */
          info = &ss->data.info;

          /* This code only checks ranges.  Elemental and vector
             dimensions are checked later.  */
          for (n = 0; n < loop->dimen; n++)
            {
              bool check_upper;

              dim = info->dim[n];
              if (info->ref->u.ar.dimen_type[dim] != DIMEN_RANGE)
                continue;

              if (dim == info->ref->u.ar.dimen - 1
                  && info->ref->u.ar.as->type == AS_ASSUMED_SIZE)
                check_upper = false;
              else
                check_upper = true;

              /* Zero stride is not allowed.  */
              tmp = fold_build2 (EQ_EXPR, boolean_type_node, info->stride[n],
                                 gfc_index_zero_node);
              asprintf (&msg, "Zero stride is not allowed, for dimension %d "
                        "of array '%s'", info->dim[n]+1,
                        ss->expr->symtree->name);
              gfc_trans_runtime_check (true, false, tmp, &inner,
                                       &ss->expr->where, msg);
              gfc_free (msg);

              desc = ss->data.info.descriptor;

              /* This is the run-time equivalent of resolve.c's
                 check_dimension().  The logic is more readable there
                 than it is here, with all the trees.  */
              lbound = gfc_conv_array_lbound (desc, dim);
              end = info->end[n];
              if (check_upper)
                ubound = gfc_conv_array_ubound (desc, dim);
              else
                ubound = NULL;

              /* non_zerosized is true when the selected range is not
                 empty.  */
              stride_pos = fold_build2 (GT_EXPR, boolean_type_node,
                                        info->stride[n], gfc_index_zero_node);
              tmp = fold_build2 (LE_EXPR, boolean_type_node, info->start[n],
                                 end);
              stride_pos = fold_build2 (TRUTH_AND_EXPR, boolean_type_node,
                                        stride_pos, tmp);

              stride_neg = fold_build2 (LT_EXPR, boolean_type_node,
                                        info->stride[n], gfc_index_zero_node);
              tmp = fold_build2 (GE_EXPR, boolean_type_node, info->start[n],
                                 end);
              stride_neg = fold_build2 (TRUTH_AND_EXPR, boolean_type_node,
                                        stride_neg, tmp);
              non_zerosized = fold_build2 (TRUTH_OR_EXPR, boolean_type_node,
                                           stride_pos, stride_neg);

              /* Check the start of the range against the lower and upper
                 bounds of the array, if the range is not empty.
                 If upper bound is present, include both bounds in the
                 error message.  */
              if (check_upper)
                {
                  tmp = fold_build2 (LT_EXPR, boolean_type_node,
                                     info->start[n], lbound);
                  tmp = fold_build2 (TRUTH_AND_EXPR, boolean_type_node,
                                     non_zerosized, tmp);
                  tmp2 = fold_build2 (GT_EXPR, boolean_type_node,
                                      info->start[n], ubound);
                  tmp2 = fold_build2 (TRUTH_AND_EXPR, boolean_type_node,
                                      non_zerosized, tmp2);
                  asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
                            "outside of expected range (%%ld:%%ld)",
                            info->dim[n]+1, ss->expr->symtree->name);
                  gfc_trans_runtime_check (true, false, tmp, &inner,
                                           &ss->expr->where, msg,
                     fold_convert (long_integer_type_node, info->start[n]),
                     fold_convert (long_integer_type_node, lbound),
                     fold_convert (long_integer_type_node, ubound));
                  gfc_trans_runtime_check (true, false, tmp2, &inner,
                                           &ss->expr->where, msg,
                     fold_convert (long_integer_type_node, info->start[n]),
                     fold_convert (long_integer_type_node, lbound),
                     fold_convert (long_integer_type_node, ubound));
                  gfc_free (msg);
                }
              else
                {
                  tmp = fold_build2 (LT_EXPR, boolean_type_node,
                                     info->start[n], lbound);
                  tmp = fold_build2 (TRUTH_AND_EXPR, boolean_type_node,
                                     non_zerosized, tmp);
                  asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
                            "below lower bound of %%ld",
                            info->dim[n]+1, ss->expr->symtree->name);
                  gfc_trans_runtime_check (true, false, tmp, &inner,
                                           &ss->expr->where, msg,
                     fold_convert (long_integer_type_node, info->start[n]),
                     fold_convert (long_integer_type_node, lbound));
                  gfc_free (msg);
                }

              /* Compute the last element of the range, which is not
                 necessarily "end" (think 0:5:3, which doesn't contain 5)
                 and check it against both lower and upper bounds.  */

              tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type, end,
                                 info->start[n]);
              tmp = fold_build2 (TRUNC_MOD_EXPR, gfc_array_index_type, tmp,
                                 info->stride[n]);
              tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type, end,
                                 tmp);
              tmp2 = fold_build2 (LT_EXPR, boolean_type_node, tmp, lbound);
              tmp2 = fold_build2 (TRUTH_AND_EXPR, boolean_type_node,
                                  non_zerosized, tmp2);
              if (check_upper)
                {
                  tmp3 = fold_build2 (GT_EXPR, boolean_type_node, tmp, ubound);
                  tmp3 = fold_build2 (TRUTH_AND_EXPR, boolean_type_node,
                                      non_zerosized, tmp3);
                  asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
                            "outside of expected range (%%ld:%%ld)",
                            info->dim[n]+1, ss->expr->symtree->name);
                  gfc_trans_runtime_check (true, false, tmp2, &inner,
                                           &ss->expr->where, msg,
                     fold_convert (long_integer_type_node, tmp),
                     fold_convert (long_integer_type_node, ubound),
                     fold_convert (long_integer_type_node, lbound));
                  gfc_trans_runtime_check (true, false, tmp3, &inner,
                                           &ss->expr->where, msg,
                     fold_convert (long_integer_type_node, tmp),
                     fold_convert (long_integer_type_node, ubound),
                     fold_convert (long_integer_type_node, lbound));
                  gfc_free (msg);
                }
              else
                {
                  asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
                            "below lower bound of %%ld",
                            info->dim[n]+1, ss->expr->symtree->name);
                  gfc_trans_runtime_check (true, false, tmp2, &inner,
                                           &ss->expr->where, msg,
                     fold_convert (long_integer_type_node, tmp),
                     fold_convert (long_integer_type_node, lbound));
                  gfc_free (msg);
                }

              /* Check the section sizes match.  */
              tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type, end,
                                 info->start[n]);
              tmp = fold_build2 (FLOOR_DIV_EXPR, gfc_array_index_type, tmp,
                                 info->stride[n]);
              tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
                                 gfc_index_one_node, tmp);
              tmp = fold_build2 (MAX_EXPR, gfc_array_index_type, tmp,
                                 build_int_cst (gfc_array_index_type, 0));
              /* We remember the size of the first section, and check all the
                 others against this.  */
              if (size[n])
                {
                  tmp3 = fold_build2 (NE_EXPR, boolean_type_node, tmp, size[n]);
                  asprintf (&msg, "Array bound mismatch for dimension %d "
                            "of array '%s' (%%ld/%%ld)",
                            info->dim[n]+1, ss->expr->symtree->name);

                  gfc_trans_runtime_check (true, false, tmp3, &inner,
                                           &ss->expr->where, msg,
                        fold_convert (long_integer_type_node, tmp),
                        fold_convert (long_integer_type_node, size[n]));

                  gfc_free (msg);
                }
              else
                size[n] = gfc_evaluate_now (tmp, &inner);
            }

          tmp = gfc_finish_block (&inner);

          /* For optional arguments, only check bounds if the argument is
             present.  */
          if (ss->expr->symtree->n.sym->attr.optional
              || ss->expr->symtree->n.sym->attr.not_always_present)
            tmp = build3_v (COND_EXPR,
                            gfc_conv_expr_present (ss->expr->symtree->n.sym),
                            tmp, build_empty_stmt (input_location));

          gfc_add_expr_to_block (&block, tmp);
        }

      tmp = gfc_finish_block (&block);
      gfc_add_expr_to_block (&loop->pre, tmp);
    }
}

/* Return true if the two SS could be aliased, i.e. both point to the same data
   object.  */
/* TODO: resolve aliases based on frontend expressions.  */

static int
gfc_could_be_alias (gfc_ss * lss, gfc_ss * rss)
{
  gfc_ref *lref;
  gfc_ref *rref;
  gfc_symbol *lsym;
  gfc_symbol *rsym;

  lsym = lss->expr->symtree->n.sym;
  rsym = rss->expr->symtree->n.sym;
  if (gfc_symbols_could_alias (lsym, rsym))
    return 1;

  if (rsym->ts.type != BT_DERIVED
      && lsym->ts.type != BT_DERIVED)
    return 0;

  /* For derived types we must check all the component types.  We can ignore
     array references as these will have the same base type as the previous
     component ref.  */
  for (lref = lss->expr->ref; lref != lss->data.info.ref; lref = lref->next)
    {
      if (lref->type != REF_COMPONENT)
        continue;

      if (gfc_symbols_could_alias (lref->u.c.sym, rsym))
        return 1;

      for (rref = rss->expr->ref; rref != rss->data.info.ref;
           rref = rref->next)
        {
          if (rref->type != REF_COMPONENT)
            continue;

          if (gfc_symbols_could_alias (lref->u.c.sym, rref->u.c.sym))
            return 1;
        }
    }

  for (rref = rss->expr->ref; rref != rss->data.info.ref; rref = rref->next)
    {
      if (rref->type != REF_COMPONENT)
        break;

      if (gfc_symbols_could_alias (rref->u.c.sym, lsym))
        return 1;
    }

  return 0;
}

/* Resolve array data dependencies.  Creates a temporary if required.  */
/* TODO: Calc dependencies with gfc_expr rather than gfc_ss, and move to
   dependency.c.  */

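/* For example, in the assignment A(2:10) = A(1:9) every right-hand side
   element must be read before any left-hand side element is written, and
   because the two sections overlap the scalarization loop cannot simply run
   forwards in place; a temporary (or an equivalent reordering) is needed.
   Non-overlapping sections of the same array, such as A(1:5) = A(6:10),
   carry no dependency and need no temporary.  */
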
void
gfc_conv_resolve_dependencies (gfc_loopinfo * loop, gfc_ss * dest,
                               gfc_ss * rss)
{
  gfc_ss *ss;
  gfc_ref *lref;
  gfc_ref *rref;
  int nDepend = 0;

  loop->temp_ss = NULL;

  for (ss = rss; ss != gfc_ss_terminator; ss = ss->next)
    {
      if (ss->type != GFC_SS_SECTION)
        continue;

      if (dest->expr->symtree->n.sym != ss->expr->symtree->n.sym)
        {
          if (gfc_could_be_alias (dest, ss)
              || gfc_are_equivalenced_arrays (dest->expr, ss->expr))
            {
              nDepend = 1;
              break;
            }
        }
      else
        {
          lref = dest->expr->ref;
          rref = ss->expr->ref;

          nDepend = gfc_dep_resolver (lref, rref);
          if (nDepend == 1)
            break;
#if 0
          /* TODO : loop shifting.  */
          if (nDepend == 1)
            {
              /* Mark the dimensions for LOOP SHIFTING */
              for (n = 0; n < loop->dimen; n++)
                {
                  int dim = dest->data.info.dim[n];

                  if (lref->u.ar.dimen_type[dim] == DIMEN_VECTOR)
                    depends[n] = 2;
                  else if (! gfc_is_same_range (&lref->u.ar,
                                                &rref->u.ar, dim, 0))
                    depends[n] = 1;
                }

              /* Put all the dimensions with dependencies in the
                 innermost loops.  */
              dim = 0;
              for (n = 0; n < loop->dimen; n++)
                {
                  gcc_assert (loop->order[n] == n);
                  if (depends[n])
                    loop->order[dim++] = n;
                }
              for (n = 0; n < loop->dimen; n++)
                {
                  if (! depends[n])
                    loop->order[dim++] = n;
                }

              gcc_assert (dim == loop->dimen);
              break;
            }
#endif
        }
    }

  if (nDepend == 1)
    {
      tree base_type = gfc_typenode_for_spec (&dest->expr->ts);
      if (GFC_ARRAY_TYPE_P (base_type)
          || GFC_DESCRIPTOR_TYPE_P (base_type))
        base_type = gfc_get_element_type (base_type);
      loop->temp_ss = gfc_get_ss ();
      loop->temp_ss->type = GFC_SS_TEMP;
      loop->temp_ss->data.temp.type = base_type;
      loop->temp_ss->string_length = dest->string_length;
      loop->temp_ss->data.temp.dimen = loop->dimen;
      loop->temp_ss->next = gfc_ss_terminator;
      gfc_add_ss_to_loop (loop, loop->temp_ss);
    }
  else
    loop->temp_ss = NULL;
}

/* Initialize the scalarization loop.  Creates the loop variables.  Determines
   the range of the loop variables.  Creates a temporary if required.
   Calculates how to transform from loop variables to array indices for each
   expression.  Also generates code for scalar expressions which have been
   moved outside the loop.  */

void
gfc_conv_loop_setup (gfc_loopinfo * loop, locus * where)
{
  int n;
  gfc_ss_info *info;
  gfc_ss_info *specinfo;
  gfc_ss *ss;
  tree tmp;
  gfc_ss *loopspec[GFC_MAX_DIMENSIONS];
  bool dynamic[GFC_MAX_DIMENSIONS];
  gfc_constructor *c;
  mpz_t *cshape;
  mpz_t i;

  mpz_init (i);
  for (n = 0; n < loop->dimen; n++)
    {
      loopspec[n] = NULL;
      dynamic[n] = false;
      /* We use one SS term, and use that to determine the bounds of the
         loop for this dimension.  We try to pick the simplest term.  */
      for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
        {
          if (ss->shape)
            {
              /* The frontend has worked out the size for us.  */
              if (!loopspec[n] || !loopspec[n]->shape
                  || !integer_zerop (loopspec[n]->data.info.start[n]))
                /* Prefer zero-based descriptors if possible.  */
                loopspec[n] = ss;
              continue;
            }

          if (ss->type == GFC_SS_CONSTRUCTOR)
            {
              /* An unknown size constructor will always be rank one.
                 Higher rank constructors will either have known shape,
                 or still be wrapped in a call to reshape.  */
              gcc_assert (loop->dimen == 1);

              /* Always prefer to use the constructor bounds if the size
                 can be determined at compile time.  Prefer not to otherwise,
                 since the general case involves realloc, and it's better to
                 avoid that overhead if possible.  */
              c = ss->expr->value.constructor;
              dynamic[n] = gfc_get_array_constructor_size (&i, c);
              if (!dynamic[n] || !loopspec[n])
                loopspec[n] = ss;
              continue;
            }

          /* TODO: Pick the best bound if we have a choice between a
             function and something else.  */
          if (ss->type == GFC_SS_FUNCTION)
            {
              loopspec[n] = ss;
              continue;
            }

          if (ss->type != GFC_SS_SECTION)
            continue;

          if (loopspec[n])
            specinfo = &loopspec[n]->data.info;
          else
            specinfo = NULL;
          info = &ss->data.info;

          if (!specinfo)
            loopspec[n] = ss;
          /* Criteria for choosing a loop specifier (most important first):
             doesn't need realloc
             stride of one
             known stride
             known lower bound
             known upper bound
           */
          else if (loopspec[n]->type == GFC_SS_CONSTRUCTOR && dynamic[n])
            loopspec[n] = ss;
          else if (integer_onep (info->stride[n])
                   && !integer_onep (specinfo->stride[n]))
            loopspec[n] = ss;
          else if (INTEGER_CST_P (info->stride[n])
                   && !INTEGER_CST_P (specinfo->stride[n]))
            loopspec[n] = ss;
          else if (INTEGER_CST_P (info->start[n])
                   && !INTEGER_CST_P (specinfo->start[n]))
            loopspec[n] = ss;
          /* We don't work out the upper bound.
             else if (INTEGER_CST_P (info->finish[n])
             && ! INTEGER_CST_P (specinfo->finish[n]))
             loopspec[n] = ss; */
        }

      /* We should have found the scalarization loop specifier.  If not,
         that's bad news.  */
      gcc_assert (loopspec[n]);

      info = &loopspec[n]->data.info;

      /* Set the extents of this range.  */
      cshape = loopspec[n]->shape;
      if (cshape && INTEGER_CST_P (info->start[n])
          && INTEGER_CST_P (info->stride[n]))
        {
          loop->from[n] = info->start[n];
          mpz_set (i, cshape[n]);
          mpz_sub_ui (i, i, 1);
          /* To = from + (size - 1) * stride.  */
          tmp = gfc_conv_mpz_to_tree (i, gfc_index_integer_kind);
          if (!integer_onep (info->stride[n]))
            tmp = fold_build2 (MULT_EXPR, gfc_array_index_type,
                               tmp, info->stride[n]);
          loop->to[n] = fold_build2 (PLUS_EXPR, gfc_array_index_type,
                                     loop->from[n], tmp);
        }
      else
        {
          loop->from[n] = info->start[n];
          switch (loopspec[n]->type)
            {
            case GFC_SS_CONSTRUCTOR:
              /* The upper bound is calculated when we expand the
                 constructor.  */
              gcc_assert (loop->to[n] == NULL_TREE);
              break;

            case GFC_SS_SECTION:
              /* Use the end expression if it exists and is not constant,
                 so that it is only evaluated once.  */
              if (info->end[n] && !INTEGER_CST_P (info->end[n]))
                loop->to[n] = info->end[n];
              else
                loop->to[n] = gfc_conv_section_upper_bound (loopspec[n], n,
                                                            &loop->pre);
              break;

            case GFC_SS_FUNCTION:
              /* The loop bound will be set when we generate the call.  */
              gcc_assert (loop->to[n] == NULL_TREE);
              break;

            default:
              gcc_unreachable ();
            }
        }

      /* Transform everything so we have a simple incrementing variable.  */
      if (integer_onep (info->stride[n]))
        info->delta[n] = gfc_index_zero_node;
      else
        {
          /* Set the delta for this section.  */
          info->delta[n] = gfc_evaluate_now (loop->from[n], &loop->pre);
          /* Number of iterations is (end - start + step) / step.
             with start = 0, this simplifies to
             last = end / step;
             for (i = 0; i<=last; i++){...};  */
          tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type,
                             loop->to[n], loop->from[n]);
          tmp = fold_build2 (FLOOR_DIV_EXPR, gfc_array_index_type,
                             tmp, info->stride[n]);
          tmp = fold_build2 (MAX_EXPR, gfc_array_index_type, tmp,
                             build_int_cst (gfc_array_index_type, -1));
          loop->to[n] = gfc_evaluate_now (tmp, &loop->pre);
          /* Make the loop variable start at 0.  */
          loop->from[n] = gfc_index_zero_node;
        }
    }

  /* Add all the scalar code that can be taken out of the loops.
     This may include calculating the loop bounds, so do it before
     allocating the temporary.  */
  gfc_add_loop_ss_code (loop, loop->ss, false, where);

  /* If we want a temporary then create it.  */
  if (loop->temp_ss != NULL)
    {
      gcc_assert (loop->temp_ss->type == GFC_SS_TEMP);

      /* Make absolutely sure that this is a complete type.  */
      if (loop->temp_ss->string_length)
        loop->temp_ss->data.temp.type
                = gfc_get_character_type_len_for_eltype
                        (TREE_TYPE (loop->temp_ss->data.temp.type),
                         loop->temp_ss->string_length);

      tmp = loop->temp_ss->data.temp.type;
      n = loop->temp_ss->data.temp.dimen;
      memset (&loop->temp_ss->data.info, 0, sizeof (gfc_ss_info));
      loop->temp_ss->type = GFC_SS_SECTION;
      loop->temp_ss->data.info.dimen = n;
      gfc_trans_create_temp_array (&loop->pre, &loop->post, loop,
                                   &loop->temp_ss->data.info, tmp, NULL_TREE,
                                   false, true, false, where);
    }

  for (n = 0; n < loop->temp_dim; n++)
    loopspec[loop->order[n]] = NULL;

  mpz_clear (i);

  /* For array parameters we don't have loop variables, so don't calculate the
     translations.  */
  if (loop->array_parameter)
    return;

  /* Calculate the translation from loop variables to array indices.  */
  for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
    {
      if (ss->type != GFC_SS_SECTION && ss->type != GFC_SS_COMPONENT
          && ss->type != GFC_SS_CONSTRUCTOR)
        continue;

      info = &ss->data.info;

      for (n = 0; n < info->dimen; n++)
        {
          /* If we are specifying the range the delta is already set.  */
          if (loopspec[n] != ss)
            {
              /* Calculate the offset relative to the loop variable.
                 First multiply by the stride.  */
              tmp = loop->from[n];
              if (!integer_onep (info->stride[n]))
                tmp = fold_build2 (MULT_EXPR, gfc_array_index_type,
                                   tmp, info->stride[n]);

              /* Then subtract this from our starting value.  */
              tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type,
                                 info->start[n], tmp);

              info->delta[n] = gfc_evaluate_now (tmp, &loop->pre);
            }
        }
    }
}

/* Fills in an array descriptor, and returns the size of the array.  The size
   will be a simple_val, i.e. a variable or a constant.  Also calculates the
   offset of the base.  Returns the size of the array.
   {
    stride = 1;
    offset = 0;
    for (n = 0; n < rank; n++)
      {
        a.lbound[n] = specified_lower_bound;
        offset = offset + a.lbound[n] * stride;
        size = 1 - lbound;
        a.ubound[n] = specified_upper_bound;
        a.stride[n] = stride;
        size = size >= 0 ? ubound + size : 0; //size = ubound + 1 - lbound
        stride = stride * size;
      }
    return (stride);
   }  */
/*GCC ARRAYS*/

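/* As a worked example of the pseudo-code above, ALLOCATE (A(2:5,3)) gives
   lbound = {2, 1}, ubound = {5, 3} and stride = {1, 4}; the element count is
   4 * 3 = 12 and the descriptor offset becomes -(2*1 + 1*4) = -6, so that
   element A(i,j) is found at data[offset + i*1 + j*4].  */
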
static tree
gfc_array_init_size (tree descriptor, int rank, tree * poffset,
                     gfc_expr ** lower, gfc_expr ** upper,
                     stmtblock_t * pblock)
{
  tree type;
  tree tmp;
  tree size;
  tree offset;
  tree stride;
  tree cond;
  tree or_expr;
  tree thencase;
  tree elsecase;
  tree var;
  stmtblock_t thenblock;
  stmtblock_t elseblock;
  gfc_expr *ubound;
  gfc_se se;
  int n;

  type = TREE_TYPE (descriptor);

  stride = gfc_index_one_node;
  offset = gfc_index_zero_node;

  /* Set the dtype.  */
  tmp = gfc_conv_descriptor_dtype (descriptor);
  gfc_add_modify (pblock, tmp, gfc_get_dtype (TREE_TYPE (descriptor)));

  or_expr = NULL_TREE;

  for (n = 0; n < rank; n++)
    {
      /* We have 3 possibilities for determining the size of the array:
         lower == NULL    => lbound = 1, ubound = upper[n]
         upper[n] = NULL  => lbound = 1, ubound = lower[n]
         upper[n] != NULL => lbound = lower[n], ubound = upper[n]  */
      ubound = upper[n];

      /* Set lower bound.  */
      gfc_init_se (&se, NULL);
      if (lower == NULL)
        se.expr = gfc_index_one_node;
      else
        {
          gcc_assert (lower[n]);
          if (ubound)
            {
              gfc_conv_expr_type (&se, lower[n], gfc_array_index_type);
              gfc_add_block_to_block (pblock, &se.pre);
            }
          else
            {
              se.expr = gfc_index_one_node;
              ubound = lower[n];
            }
        }
      gfc_conv_descriptor_lbound_set (pblock, descriptor, gfc_rank_cst[n],
                                      se.expr);

      /* Work out the offset for this component.  */
      tmp = fold_build2 (MULT_EXPR, gfc_array_index_type, se.expr, stride);
      offset = fold_build2 (MINUS_EXPR, gfc_array_index_type, offset, tmp);

      /* Start the calculation for the size of this dimension.  */
      size = fold_build2 (MINUS_EXPR, gfc_array_index_type,
                          gfc_index_one_node, se.expr);

      /* Set upper bound.  */
      gfc_init_se (&se, NULL);
      gcc_assert (ubound);
      gfc_conv_expr_type (&se, ubound, gfc_array_index_type);
      gfc_add_block_to_block (pblock, &se.pre);

      gfc_conv_descriptor_ubound_set (pblock, descriptor, gfc_rank_cst[n], se.expr);

      /* Store the stride.  */
      gfc_conv_descriptor_stride_set (pblock, descriptor, gfc_rank_cst[n], stride);

      /* Calculate the size of this dimension.  */
      size = fold_build2 (PLUS_EXPR, gfc_array_index_type, se.expr, size);

      /* Check whether the size for this dimension is negative.  */
      cond = fold_build2 (LE_EXPR, boolean_type_node, size,
                          gfc_index_zero_node);
      if (n == 0)
        or_expr = cond;
      else
        or_expr = fold_build2 (TRUTH_OR_EXPR, boolean_type_node, or_expr, cond);

      size = fold_build3 (COND_EXPR, gfc_array_index_type, cond,
                          gfc_index_zero_node, size);

      /* Multiply the stride by the number of elements in this dimension.  */
      stride = fold_build2 (MULT_EXPR, gfc_array_index_type, stride, size);
      stride = gfc_evaluate_now (stride, pblock);
    }

  /* The stride is the number of elements in the array, so multiply by the
     size of an element to get the total size.  */
  tmp = TYPE_SIZE_UNIT (gfc_get_element_type (type));
  size = fold_build2 (MULT_EXPR, gfc_array_index_type, stride,
                      fold_convert (gfc_array_index_type, tmp));

  if (poffset != NULL)
    {
      offset = gfc_evaluate_now (offset, pblock);
      *poffset = offset;
    }

  if (integer_zerop (or_expr))
    return size;
  if (integer_onep (or_expr))
    return gfc_index_zero_node;

  var = gfc_create_var (TREE_TYPE (size), "size");
  gfc_start_block (&thenblock);
  gfc_add_modify (&thenblock, var, gfc_index_zero_node);
  thencase = gfc_finish_block (&thenblock);

  gfc_start_block (&elseblock);
  gfc_add_modify (&elseblock, var, size);
  elsecase = gfc_finish_block (&elseblock);

  tmp = gfc_evaluate_now (or_expr, pblock);
  tmp = build3_v (COND_EXPR, tmp, thencase, elsecase);
  gfc_add_expr_to_block (pblock, tmp);

  return var;
}

/* Initializes the descriptor and generates a call to _gfor_allocate.  Does
   the work for an ALLOCATE statement.  */
/*GCC ARRAYS*/

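/* For example, for ALLOCATE (X(N,M), STAT=IERR) the lower and upper bounds
   are filled into X's descriptor and the total size in bytes is computed by
   gfc_array_init_size above; the allocation library call then receives that
   size together with the status variable, and the data pointer and offset of
   the descriptor are set from the result.  */
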
bool
gfc_array_allocate (gfc_se * se, gfc_expr * expr, tree pstat)
{
  tree tmp;
  tree pointer;
  tree offset;
  tree size;
  gfc_expr **lower;
  gfc_expr **upper;
  gfc_ref *ref, *prev_ref = NULL;
  bool allocatable_array;

  ref = expr->ref;

  /* Find the last reference in the chain.  */
  while (ref && ref->next != NULL)
    {
      gcc_assert (ref->type != REF_ARRAY || ref->u.ar.type == AR_ELEMENT);
      prev_ref = ref;
      ref = ref->next;
    }

  if (ref == NULL || ref->type != REF_ARRAY)
    return false;

  if (!prev_ref)
    allocatable_array = expr->symtree->n.sym->attr.allocatable;
  else
    allocatable_array = prev_ref->u.c.component->attr.allocatable;

  /* Figure out the size of the array.  */
  switch (ref->u.ar.type)
    {
    case AR_ELEMENT:
      lower = NULL;
      upper = ref->u.ar.start;
      break;

    case AR_FULL:
      gcc_assert (ref->u.ar.as->type == AS_EXPLICIT);

      lower = ref->u.ar.as->lower;
      upper = ref->u.ar.as->upper;
      break;

    case AR_SECTION:
      lower = ref->u.ar.start;
      upper = ref->u.ar.end;
      break;

    default:
      gcc_unreachable ();
      break;
    }

  size = gfc_array_init_size (se->expr, ref->u.ar.as->rank, &offset,
                              lower, upper, &se->pre);

  /* Allocate memory to store the data.  */
  pointer = gfc_conv_descriptor_data_get (se->expr);
  STRIP_NOPS (pointer);

  /* The allocate_array variants take the old pointer as first argument.  */
  if (allocatable_array)
    tmp = gfc_allocate_array_with_status (&se->pre, pointer, size, pstat, expr);
  else
    tmp = gfc_allocate_with_status (&se->pre, size, pstat);
  tmp = fold_build2 (MODIFY_EXPR, void_type_node, pointer, tmp);
  gfc_add_expr_to_block (&se->pre, tmp);

  gfc_conv_descriptor_offset_set (&se->pre, se->expr, offset);

  if (expr->ts.type == BT_DERIVED
      && expr->ts.u.derived->attr.alloc_comp)
    {
      tmp = gfc_nullify_alloc_comp (expr->ts.u.derived, se->expr,
                                    ref->u.ar.as->rank);
      gfc_add_expr_to_block (&se->pre, tmp);
    }

  return true;
}

/* Deallocate an array variable.  Also used when an allocated variable goes
   out of scope.  */
/*GCC ARRAYS*/

tree
gfc_array_deallocate (tree descriptor, tree pstat, gfc_expr* expr)
{
  tree var;
  tree tmp;
  stmtblock_t block;

  gfc_start_block (&block);
  /* Get a pointer to the data.  */
  var = gfc_conv_descriptor_data_get (descriptor);
  STRIP_NOPS (var);

  /* Parameter is the address of the data component.  */
  tmp = gfc_deallocate_with_status (var, pstat, false, expr);
  gfc_add_expr_to_block (&block, tmp);

  /* Zero the data pointer.  */
  tmp = fold_build2 (MODIFY_EXPR, void_type_node,
                     var, build_int_cst (TREE_TYPE (var), 0));
  gfc_add_expr_to_block (&block, tmp);

  return gfc_finish_block (&block);
}

/* Create an array constructor from an initialization expression.
   We assume the frontend already did any expansions and conversions.  */

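/* For example, the initializer in
     integer :: a(4) = (/ 1, 2, 3, 4 /)
   becomes a backend CONSTRUCTOR with one element per value, while
     integer :: b(1000) = 0
   broadcasts the single scalar into every element of the constructor.  */
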
tree
gfc_conv_array_initializer (tree type, gfc_expr * expr)
{
  gfc_constructor *c;
  tree tmp;
  mpz_t maxval;
  gfc_se se;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT lo;
  tree index, range;
  VEC(constructor_elt,gc) *v = NULL;

  switch (expr->expr_type)
    {
    case EXPR_CONSTANT:
    case EXPR_STRUCTURE:
      /* A single scalar or derived type value.  Create an array with all
         elements equal to that value.  */
      gfc_init_se (&se, NULL);

      if (expr->expr_type == EXPR_CONSTANT)
        gfc_conv_constant (&se, expr);
      else
        gfc_conv_structure (&se, expr, 1);

      tmp = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
      gcc_assert (tmp && INTEGER_CST_P (tmp));
      hi = TREE_INT_CST_HIGH (tmp);
      lo = TREE_INT_CST_LOW (tmp);
      lo++;
      if (lo == 0)
        hi++;
      /* This will probably eat buckets of memory for large arrays.  */
      while (hi != 0 || lo != 0)
        {
          CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, se.expr);
          if (lo == 0)
            hi--;
          lo--;
        }
      break;

    case EXPR_ARRAY:
      /* Create a vector of all the elements.  */
      for (c = expr->value.constructor; c; c = c->next)
        {
          if (c->iterator)
            {
              /* Problems occur when we get something like
                 integer :: a(lots) = (/(i, i=1, lots)/)  */
              gfc_fatal_error ("The number of elements in the array constructor "
                               "at %L requires an increase of the allowed %d "
                               "upper limit.   See -fmax-array-constructor "
                               "option", &expr->where,
                               gfc_option.flag_max_array_constructor);
              return NULL_TREE;
            }
          if (mpz_cmp_si (c->n.offset, 0) != 0)
            index = gfc_conv_mpz_to_tree (c->n.offset, gfc_index_integer_kind);
          else
            index = NULL_TREE;
          mpz_init (maxval);
          if (mpz_cmp_si (c->repeat, 0) != 0)
            {
              tree tmp1, tmp2;

              mpz_set (maxval, c->repeat);
              mpz_add (maxval, c->n.offset, maxval);
              mpz_sub_ui (maxval, maxval, 1);
              tmp2 = gfc_conv_mpz_to_tree (maxval, gfc_index_integer_kind);
              if (mpz_cmp_si (c->n.offset, 0) != 0)
                {
                  mpz_add_ui (maxval, c->n.offset, 1);
                  tmp1 = gfc_conv_mpz_to_tree (maxval, gfc_index_integer_kind);
                }
              else
                tmp1 = gfc_conv_mpz_to_tree (c->n.offset, gfc_index_integer_kind);

              range = fold_build2 (RANGE_EXPR, integer_type_node, tmp1, tmp2);
            }
          else
            range = NULL;
          mpz_clear (maxval);

          gfc_init_se (&se, NULL);
          switch (c->expr->expr_type)
            {
            case EXPR_CONSTANT:
              gfc_conv_constant (&se, c->expr);
              if (range == NULL_TREE)
                CONSTRUCTOR_APPEND_ELT (v, index, se.expr);
              else
                {
                  if (index != NULL_TREE)
                    CONSTRUCTOR_APPEND_ELT (v, index, se.expr);
                  CONSTRUCTOR_APPEND_ELT (v, range, se.expr);
                }
              break;

            case EXPR_STRUCTURE:
              gfc_conv_structure (&se, c->expr, 1);
              CONSTRUCTOR_APPEND_ELT (v, index, se.expr);
              break;

            default:
              /* Catch those occasional beasts that do not simplify
                 for one reason or another, assuming that if they are
                 standard defying the frontend will catch them.  */
              gfc_conv_expr (&se, c->expr);
              if (range == NULL_TREE)
                CONSTRUCTOR_APPEND_ELT (v, index, se.expr);
              else
                {
                  if (index != NULL_TREE)
                    CONSTRUCTOR_APPEND_ELT (v, index, se.expr);
                  CONSTRUCTOR_APPEND_ELT (v, range, se.expr);
                }
              break;
            }
        }
      break;

    case EXPR_NULL:
      return gfc_build_null_descriptor (type);

    default:
      gcc_unreachable ();
    }

  /* Create a constructor from the list of elements.  */
  tmp = build_constructor (type, v);
  TREE_CONSTANT (tmp) = 1;
  return tmp;
}

/* Generate code to evaluate non-constant array bounds.  Sets *poffset and
   returns the size (in elements) of the array.  */

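/* For example, for an automatic array declared as
     real :: work(n, 0:m)
   inside a procedure, the bounds n and m are evaluated on entry, the strides
   and the total element count are computed from them, and the offset is set
   so that work(1,0) maps to element zero of the data.  */
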
static tree
gfc_trans_array_bounds (tree type, gfc_symbol * sym, tree * poffset,
                        stmtblock_t * pblock)
{
  gfc_array_spec *as;
  tree size;
  tree stride;
  tree offset;
  tree ubound;
  tree lbound;
  tree tmp;
  gfc_se se;

  int dim;

  as = sym->as;

  size = gfc_index_one_node;
  offset = gfc_index_zero_node;
  for (dim = 0; dim < as->rank; dim++)
    {
      /* Evaluate non-constant array bound expressions.  */
      lbound = GFC_TYPE_ARRAY_LBOUND (type, dim);
      if (as->lower[dim] && !INTEGER_CST_P (lbound))
        {
          gfc_init_se (&se, NULL);
          gfc_conv_expr_type (&se, as->lower[dim], gfc_array_index_type);
          gfc_add_block_to_block (pblock, &se.pre);
          gfc_add_modify (pblock, lbound, se.expr);
        }
      ubound = GFC_TYPE_ARRAY_UBOUND (type, dim);
      if (as->upper[dim] && !INTEGER_CST_P (ubound))
        {
          gfc_init_se (&se, NULL);
          gfc_conv_expr_type (&se, as->upper[dim], gfc_array_index_type);
          gfc_add_block_to_block (pblock, &se.pre);
          gfc_add_modify (pblock, ubound, se.expr);
        }
      /* The offset of this dimension.  offset = offset - lbound * stride.  */
      tmp = fold_build2 (MULT_EXPR, gfc_array_index_type, lbound, size);
      offset = fold_build2 (MINUS_EXPR, gfc_array_index_type, offset, tmp);

      /* The size of this dimension, and the stride of the next.  */
      if (dim + 1 < as->rank)
        stride = GFC_TYPE_ARRAY_STRIDE (type, dim + 1);
      else
        stride = GFC_TYPE_ARRAY_SIZE (type);

      if (ubound != NULL_TREE && !(stride && INTEGER_CST_P (stride)))
        {
          /* Calculate stride = size * (ubound + 1 - lbound).  */
          tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type,
                             gfc_index_one_node, lbound);
          tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type, ubound, tmp);
          tmp = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
          if (stride)
            gfc_add_modify (pblock, stride, tmp);
          else
            stride = gfc_evaluate_now (tmp, pblock);

          /* Make sure that negative size arrays are translated
             to being zero size.  */
          tmp = fold_build2 (GE_EXPR, boolean_type_node,
                             stride, gfc_index_zero_node);
          tmp = fold_build3 (COND_EXPR, gfc_array_index_type, tmp,
                             stride, gfc_index_zero_node);
          gfc_add_modify (pblock, stride, tmp);
        }

      size = stride;
    }

  gfc_trans_vla_type_sizes (sym, pblock);

  *poffset = offset;
  return size;
}

/* Generate code to initialize/allocate an array variable.  */

tree
gfc_trans_auto_array_allocation (tree decl, gfc_symbol * sym, tree fnbody)
{
  stmtblock_t block;
  tree type;
  tree tmp;
  tree size;
  tree offset;
  bool onstack;

  gcc_assert (!(sym->attr.pointer || sym->attr.allocatable));

  /* Do nothing for USEd variables.  */
  if (sym->attr.use_assoc)
    return fnbody;

  type = TREE_TYPE (decl);
  gcc_assert (GFC_ARRAY_TYPE_P (type));
  onstack = TREE_CODE (type) != POINTER_TYPE;

  gfc_start_block (&block);

  /* Evaluate character string length.  */
  if (sym->ts.type == BT_CHARACTER
      && onstack && !INTEGER_CST_P (sym->ts.u.cl->backend_decl))
    {
      gfc_conv_string_length (sym->ts.u.cl, NULL, &block);

      gfc_trans_vla_type_sizes (sym, &block);

      /* Emit a DECL_EXPR for this variable, which will cause the
         gimplifier to allocate storage, and all that good stuff.  */
      tmp = fold_build1 (DECL_EXPR, TREE_TYPE (decl), decl);
      gfc_add_expr_to_block (&block, tmp);
    }

  if (onstack)
    {
      gfc_add_expr_to_block (&block, fnbody);
      return gfc_finish_block (&block);
    }

  type = TREE_TYPE (type);

  gcc_assert (!sym->attr.use_assoc);
  gcc_assert (!TREE_STATIC (decl));
  gcc_assert (!sym->module);

  if (sym->ts.type == BT_CHARACTER
      && !INTEGER_CST_P (sym->ts.u.cl->backend_decl))
    gfc_conv_string_length (sym->ts.u.cl, NULL, &block);

  size = gfc_trans_array_bounds (type, sym, &offset, &block);

  /* Don't actually allocate space for Cray Pointees.  */
  if (sym->attr.cray_pointee)
    {
      if (TREE_CODE (GFC_TYPE_ARRAY_OFFSET (type)) == VAR_DECL)
        gfc_add_modify (&block, GFC_TYPE_ARRAY_OFFSET (type), offset);
      gfc_add_expr_to_block (&block, fnbody);
      return gfc_finish_block (&block);
    }

  /* The size is the number of elements in the array, so multiply by the
     size of an element to get the total size.  */
  tmp = TYPE_SIZE_UNIT (gfc_get_element_type (type));
  size = fold_build2 (MULT_EXPR, gfc_array_index_type, size,
                      fold_convert (gfc_array_index_type, tmp));

  /* Allocate memory to hold the data.  */
  tmp = gfc_call_malloc (&block, TREE_TYPE (decl), size);
  gfc_add_modify (&block, decl, tmp);

  /* Set offset of the array.  */
  if (TREE_CODE (GFC_TYPE_ARRAY_OFFSET (type)) == VAR_DECL)
    gfc_add_modify (&block, GFC_TYPE_ARRAY_OFFSET (type), offset);

  /* Automatic arrays should not have initializers.  */
  gcc_assert (!sym->value);

  gfc_add_expr_to_block (&block, fnbody);

  /* Free the temporary.  */
  tmp = gfc_call_free (convert (pvoid_type_node, decl));
  gfc_add_expr_to_block (&block, tmp);

  return gfc_finish_block (&block);
}

/* Generate entry and exit code for g77 calling convention arrays.  */

tree
gfc_trans_g77_array (gfc_symbol * sym, tree body)
{
  tree parm;
  tree type;
  locus loc;
  tree offset;
  tree tmp;
  tree stmt;
  stmtblock_t block;

  gfc_get_backend_locus (&loc);
  gfc_set_backend_locus (&sym->declared_at);

  /* Descriptor type.  */
  parm = sym->backend_decl;
  type = TREE_TYPE (parm);
  gcc_assert (GFC_ARRAY_TYPE_P (type));

  gfc_start_block (&block);

  if (sym->ts.type == BT_CHARACTER
      && TREE_CODE (sym->ts.u.cl->backend_decl) == VAR_DECL)
    gfc_conv_string_length (sym->ts.u.cl, NULL, &block);

  /* Evaluate the bounds of the array.  */
  gfc_trans_array_bounds (type, sym, &offset, &block);

  /* Set the offset.  */
  if (TREE_CODE (GFC_TYPE_ARRAY_OFFSET (type)) == VAR_DECL)
    gfc_add_modify (&block, GFC_TYPE_ARRAY_OFFSET (type), offset);

  /* Set the pointer itself if we aren't using the parameter directly.  */
  if (TREE_CODE (parm) != PARM_DECL)
    {
      tmp = convert (TREE_TYPE (parm), GFC_DECL_SAVED_DESCRIPTOR (parm));
      gfc_add_modify (&block, parm, tmp);
    }
  stmt = gfc_finish_block (&block);

  gfc_set_backend_locus (&loc);

  gfc_start_block (&block);

  /* Add the initialization code to the start of the function.  */

  if (sym->attr.optional || sym->attr.not_always_present)
    {
      tmp = gfc_conv_expr_present (sym);
      stmt = build3_v (COND_EXPR, tmp, stmt, build_empty_stmt (input_location));
    }

  gfc_add_expr_to_block (&block, stmt);
  gfc_add_expr_to_block (&block, body);

  return gfc_finish_block (&block);
}

/* Modify the descriptor of an array parameter so that it has the
   correct lower bound.  Also move the upper bound accordingly.
   If the array is not packed, it will be copied into a temporary.
   For each dimension we set the new lower and upper bounds.  Then we copy the
   stride and calculate the offset for this dimension.  We also work out
   what the stride of a packed array would be, and see if the two match.
   If the array needs repacking, we set the stride to the values we just
   calculated, recalculate the offset and copy the array data.
   Code is also added to copy the data back at the end of the function.  */

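/* For example, if the caller passes the non-contiguous section A(1:20:2) to
   a dummy argument handled here, the stride check below detects that the
   data is not packed, the elements are copied into a contiguous temporary on
   entry, and the (possibly modified) values are copied back into A when the
   procedure returns.  */
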
tree
gfc_trans_dummy_array_bias (gfc_symbol * sym, tree tmpdesc, tree body)
{
  tree size;
  tree type;
  tree offset;
  locus loc;
  stmtblock_t block;
  stmtblock_t cleanup;
  tree lbound;
  tree ubound;
  tree dubound;
  tree dlbound;
  tree dumdesc;
  tree tmp;
  tree stmt;
  tree stride, stride2;
  tree stmt_packed;
  tree stmt_unpacked;
  tree partial;
  gfc_se se;
  int n;
  int checkparm;
  int no_repack;
  bool optional_arg;

  /* Do nothing for pointer and allocatable arrays.  */
  if (sym->attr.pointer || sym->attr.allocatable)
    return body;

  if (sym->attr.dummy && gfc_is_nodesc_array (sym))
    return gfc_trans_g77_array (sym, body);

  gfc_get_backend_locus (&loc);
  gfc_set_backend_locus (&sym->declared_at);

  /* Descriptor type.  */
  type = TREE_TYPE (tmpdesc);
  gcc_assert (GFC_ARRAY_TYPE_P (type));
  dumdesc = GFC_DECL_SAVED_DESCRIPTOR (tmpdesc);
  dumdesc = build_fold_indirect_ref_loc (input_location, dumdesc);
  gfc_start_block (&block);

  if (sym->ts.type == BT_CHARACTER
      && TREE_CODE (sym->ts.u.cl->backend_decl) == VAR_DECL)
    gfc_conv_string_length (sym->ts.u.cl, NULL, &block);

  checkparm = (sym->as->type == AS_EXPLICIT
               && (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS));

  no_repack = !(GFC_DECL_PACKED_ARRAY (tmpdesc)
                || GFC_DECL_PARTIAL_PACKED_ARRAY (tmpdesc));

  if (GFC_DECL_PARTIAL_PACKED_ARRAY (tmpdesc))
    {
      /* For non-constant shape arrays we only check if the first dimension
         is contiguous.  Repacking higher dimensions wouldn't gain us
         anything as we still don't know the array stride.  */
      partial = gfc_create_var (boolean_type_node, "partial");
      TREE_USED (partial) = 1;
      tmp = gfc_conv_descriptor_stride_get (dumdesc, gfc_rank_cst[0]);
      tmp = fold_build2 (EQ_EXPR, boolean_type_node, tmp, gfc_index_one_node);
      gfc_add_modify (&block, partial, tmp);
    }
  else
    {
      partial = NULL_TREE;
    }

  /* The naming of stmt_unpacked and stmt_packed may be counter-intuitive
     here, however I think it does the right thing.  */
  if (no_repack)
    {
      /* Set the first stride.  */
      stride = gfc_conv_descriptor_stride_get (dumdesc, gfc_rank_cst[0]);
      stride = gfc_evaluate_now (stride, &block);

      tmp = fold_build2 (EQ_EXPR, boolean_type_node,
                         stride, gfc_index_zero_node);
      tmp = fold_build3 (COND_EXPR, gfc_array_index_type, tmp,
                         gfc_index_one_node, stride);
      stride = GFC_TYPE_ARRAY_STRIDE (type, 0);
      gfc_add_modify (&block, stride, tmp);

      /* Allow the user to disable array repacking.  */
      stmt_unpacked = NULL_TREE;
    }
  else
    {
      gcc_assert (integer_onep (GFC_TYPE_ARRAY_STRIDE (type, 0)));
      /* A library call to repack the array if necessary.  */
      tmp = GFC_DECL_SAVED_DESCRIPTOR (tmpdesc);
      stmt_unpacked = build_call_expr_loc (input_location,
                                           gfor_fndecl_in_pack, 1, tmp);

      stride = gfc_index_one_node;

      if (gfc_option.warn_array_temp)
        gfc_warning ("Creating array temporary at %L", &loc);
    }

  /* This is for the case where the array data is used directly without
     calling the repack function.  */
  if (no_repack || partial != NULL_TREE)
    stmt_packed = gfc_conv_descriptor_data_get (dumdesc);
  else
    stmt_packed = NULL_TREE;

  /* Assign the data pointer.  */
  if (stmt_packed != NULL_TREE && stmt_unpacked != NULL_TREE)
    {
      /* Don't repack unknown shape arrays when the first stride is 1.  */
      tmp = fold_build3 (COND_EXPR, TREE_TYPE (stmt_packed),
                         partial, stmt_packed, stmt_unpacked);
    }
  else
    tmp = stmt_packed != NULL_TREE ? stmt_packed : stmt_unpacked;
  gfc_add_modify (&block, tmpdesc, fold_convert (type, tmp));

  offset = gfc_index_zero_node;
  size = gfc_index_one_node;

  /* Evaluate the bounds of the array.  */
  for (n = 0; n < sym->as->rank; n++)
    {
      if (checkparm || !sym->as->upper[n])
        {
          /* Get the bounds of the actual parameter.  */
          dubound = gfc_conv_descriptor_ubound_get (dumdesc, gfc_rank_cst[n]);
          dlbound = gfc_conv_descriptor_lbound_get (dumdesc, gfc_rank_cst[n]);
        }
      else
        {
          dubound = NULL_TREE;
          dlbound = NULL_TREE;
        }

      lbound = GFC_TYPE_ARRAY_LBOUND (type, n);
      if (!INTEGER_CST_P (lbound))
        {
          gfc_init_se (&se, NULL);
          gfc_conv_expr_type (&se, sym->as->lower[n],
                              gfc_array_index_type);
          gfc_add_block_to_block (&block, &se.pre);
          gfc_add_modify (&block, lbound, se.expr);
        }

      ubound = GFC_TYPE_ARRAY_UBOUND (type, n);
      /* Set the desired upper bound.  */
      if (sym->as->upper[n])
        {
          /* We know what we want the upper bound to be.  */
          if (!INTEGER_CST_P (ubound))
            {
              gfc_init_se (&se, NULL);
              gfc_conv_expr_type (&se, sym->as->upper[n],
                                  gfc_array_index_type);
              gfc_add_block_to_block (&block, &se.pre);
              gfc_add_modify (&block, ubound, se.expr);
            }

          /* Check the sizes match.  */
          if (checkparm)
            {
              /* Check (ubound(a) - lbound(a) == ubound(b) - lbound(b)).  */
              char * msg;
              tree temp;

              temp = fold_build2 (MINUS_EXPR, gfc_array_index_type,
                                  ubound, lbound);
              temp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
                                  gfc_index_one_node, temp);

              stride2 = fold_build2 (MINUS_EXPR, gfc_array_index_type,
                                     dubound, dlbound);
              stride2 = fold_build2 (PLUS_EXPR, gfc_array_index_type,
                                     gfc_index_one_node, stride2);

              tmp = fold_build2 (NE_EXPR, gfc_array_index_type, temp, stride2);
              asprintf (&msg, "Dimension %d of array '%s' has extent "
                        "%%ld instead of %%ld", n+1, sym->name);

              gfc_trans_runtime_check (true, false, tmp, &block, &loc, msg,
                          fold_convert (long_integer_type_node, temp),
                          fold_convert (long_integer_type_node, stride2));

              gfc_free (msg);
            }
        }
      else
        {
          /* For assumed shape arrays move the upper bound by the same amount
             as the lower bound.  */
          tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type,
                             dubound, dlbound);
          tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type, tmp, lbound);
          gfc_add_modify (&block, ubound, tmp);
        }
      /* The offset of this dimension.  offset = offset - lbound * stride.  */
      tmp = fold_build2 (MULT_EXPR, gfc_array_index_type, lbound, stride);
      offset = fold_build2 (MINUS_EXPR, gfc_array_index_type, offset, tmp);

      /* The size of this dimension, and the stride of the next.  */
      if (n + 1 < sym->as->rank)
        {
          stride = GFC_TYPE_ARRAY_STRIDE (type, n + 1);

          if (no_repack || partial != NULL_TREE)
            {
              stmt_unpacked =
                gfc_conv_descriptor_stride_get (dumdesc, gfc_rank_cst[n+1]);
            }

          /* Figure out the stride if not a known constant.  */
          if (!INTEGER_CST_P (stride))
            {
              if (no_repack)
                stmt_packed = NULL_TREE;
              else
                {
                  /* Calculate stride = size * (ubound + 1 - lbound).  */
                  tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type,
                                     gfc_index_one_node, lbound);
                  tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
                                     ubound, tmp);
                  size = fold_build2 (MULT_EXPR, gfc_array_index_type,
                                      size, tmp);
                  stmt_packed = size;
                }

              /* Assign the stride.  */
              if (stmt_packed != NULL_TREE && stmt_unpacked != NULL_TREE)
                tmp = fold_build3 (COND_EXPR, gfc_array_index_type, partial,
                                   stmt_unpacked, stmt_packed);
              else
                tmp = (stmt_packed != NULL_TREE) ? stmt_packed : stmt_unpacked;
              gfc_add_modify (&block, stride, tmp);
            }
        }
      else
        {
          stride = GFC_TYPE_ARRAY_SIZE (type);

          if (stride && !INTEGER_CST_P (stride))
            {
              /* Calculate size = stride * (ubound + 1 - lbound).  */
              tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type,
                                 gfc_index_one_node, lbound);
              tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
                                 ubound, tmp);
              tmp = fold_build2 (MULT_EXPR, gfc_array_index_type,
                                 GFC_TYPE_ARRAY_STRIDE (type, n), tmp);
              gfc_add_modify (&block, stride, tmp);
            }
        }
    }

  /* Set the offset.  */
  if (TREE_CODE (GFC_TYPE_ARRAY_OFFSET (type)) == VAR_DECL)
    gfc_add_modify (&block, GFC_TYPE_ARRAY_OFFSET (type), offset);

  gfc_trans_vla_type_sizes (sym, &block);

  stmt = gfc_finish_block (&block);

  gfc_start_block (&block);

  /* Only do the entry/initialization code if the arg is present.  */
  dumdesc = GFC_DECL_SAVED_DESCRIPTOR (tmpdesc);
  optional_arg = (sym->attr.optional
                  || (sym->ns->proc_name->attr.entry_master
                      && sym->attr.dummy));
  if (optional_arg)
    {
      tmp = gfc_conv_expr_present (sym);
      stmt = build3_v (COND_EXPR, tmp, stmt, build_empty_stmt (input_location));
    }
  gfc_add_expr_to_block (&block, stmt);

  /* Add the main function body.  */
  gfc_add_expr_to_block (&block, body);

  /* Cleanup code.  */
  if (!no_repack)
    {
      gfc_start_block (&cleanup);

      if (sym->attr.intent != INTENT_IN)
        {
          /* Copy the data back.  */
          tmp = build_call_expr_loc (input_location,
                                     gfor_fndecl_in_unpack, 2, dumdesc, tmpdesc);
          gfc_add_expr_to_block (&cleanup, tmp);
        }

      /* Free the temporary.  */
      tmp = gfc_call_free (tmpdesc);
      gfc_add_expr_to_block (&cleanup, tmp);

      stmt = gfc_finish_block (&cleanup);

      /* Only do the cleanup if the array was repacked.  */
      tmp = build_fold_indirect_ref_loc (input_location, dumdesc);
      tmp = gfc_conv_descriptor_data_get (tmp);
      tmp = fold_build2 (NE_EXPR, boolean_type_node, tmp, tmpdesc);
      stmt = build3_v (COND_EXPR, tmp, stmt, build_empty_stmt (input_location));

      if (optional_arg)
        {
          tmp = gfc_conv_expr_present (sym);
          stmt = build3_v (COND_EXPR, tmp, stmt,
                           build_empty_stmt (input_location));
        }
      gfc_add_expr_to_block (&block, stmt);
    }
  /* We don't need to free any memory allocated by internal_pack as it will
     be freed at the end of the function by pop_context.  */
  return gfc_finish_block (&block);
}


/* Calculate the overall offset, including subreferences.  */
static void
gfc_get_dataptr_offset (stmtblock_t *block, tree parm, tree desc, tree offset,
                        bool subref, gfc_expr *expr)
{
  tree tmp;
  tree field;
  tree stride;
  tree index;
  gfc_ref *ref;
  gfc_se start;
  int n;

  /* If offset is NULL and this is not a subreferenced array, there is
     nothing to do.  */
  if (offset == NULL_TREE)
    {
      if (subref)
        offset = gfc_index_zero_node;
      else
        return;
    }

  tmp = gfc_conv_array_data (desc);
  tmp = build_fold_indirect_ref_loc (input_location, tmp);
  tmp = gfc_build_array_ref (tmp, offset, NULL);

  /* Offset the data pointer for pointer assignments from arrays with
     subreferences; e.g. my_integer => my_type(:)%integer_component.  */
  if (subref)
    {
      /* Go past the array reference.  */
      for (ref = expr->ref; ref; ref = ref->next)
        if (ref->type == REF_ARRAY &&
            ref->u.ar.type != AR_ELEMENT)
          {
            ref = ref->next;
            break;
          }

      /* Calculate the offset for each subsequent subreference.  */
      for (; ref; ref = ref->next)
        {
          switch (ref->type)
            {
            case REF_COMPONENT:
              field = ref->u.c.component->backend_decl;
              gcc_assert (field && TREE_CODE (field) == FIELD_DECL);
              tmp = fold_build3 (COMPONENT_REF, TREE_TYPE (field),
                                 tmp, field, NULL_TREE);
              break;

            case REF_SUBSTRING:
              gcc_assert (TREE_CODE (TREE_TYPE (tmp)) == ARRAY_TYPE);
              gfc_init_se (&start, NULL);
              gfc_conv_expr_type (&start, ref->u.ss.start, gfc_charlen_type_node);
              gfc_add_block_to_block (block, &start.pre);
              tmp = gfc_build_array_ref (tmp, start.expr, NULL);
              break;

            case REF_ARRAY:
              gcc_assert (TREE_CODE (TREE_TYPE (tmp)) == ARRAY_TYPE
                          && ref->u.ar.type == AR_ELEMENT);

              /* TODO - Add bounds checking.  */
              stride = gfc_index_one_node;
              index = gfc_index_zero_node;
              for (n = 0; n < ref->u.ar.dimen; n++)
                {
                  tree itmp;
                  tree jtmp;

                  /* Update the index.  */
                  gfc_init_se (&start, NULL);
                  gfc_conv_expr_type (&start, ref->u.ar.start[n], gfc_array_index_type);
                  itmp = gfc_evaluate_now (start.expr, block);
                  gfc_init_se (&start, NULL);
                  gfc_conv_expr_type (&start, ref->u.ar.as->lower[n], gfc_array_index_type);
                  jtmp = gfc_evaluate_now (start.expr, block);
                  itmp = fold_build2 (MINUS_EXPR, gfc_array_index_type, itmp, jtmp);
                  itmp = fold_build2 (MULT_EXPR, gfc_array_index_type, itmp, stride);
                  index = fold_build2 (PLUS_EXPR, gfc_array_index_type, itmp, index);
                  index = gfc_evaluate_now (index, block);

                  /* Update the stride.  */
                  gfc_init_se (&start, NULL);
                  gfc_conv_expr_type (&start, ref->u.ar.as->upper[n], gfc_array_index_type);
                  itmp = fold_build2 (MINUS_EXPR, gfc_array_index_type, start.expr, jtmp);
                  itmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
                                      gfc_index_one_node, itmp);
                  stride = fold_build2 (MULT_EXPR, gfc_array_index_type, stride, itmp);
                  stride = gfc_evaluate_now (stride, block);
                }

              /* Apply the index to obtain the array element.  */
              tmp = gfc_build_array_ref (tmp, index, NULL);
              break;

            default:
              gcc_unreachable ();
              break;
            }
        }
    }

  /* Set the target data pointer.  */
  offset = gfc_build_addr_expr (gfc_array_dataptr_type (desc), tmp);
  gfc_conv_descriptor_data_set (block, parm, offset);
}


/* gfc_conv_expr_descriptor needs the string length of an expression
   so that the size of the temporary can be obtained.  This is done
   by adding up the string lengths of all the elements in the
   expression.  Functions with non-constant expressions have their
   string lengths mapped onto the actual arguments using the
   interface mapping machinery in trans-expr.c.  */
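/* For illustration: given a character array expression such as

       c = a // b

   the scalarizer needs a length for the temporary; for the concatenation the
   EXPR_OP case below assigns the sum of the two operand lengths.  */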
static void
get_array_charlen (gfc_expr *expr, gfc_se *se)
{
  gfc_interface_mapping mapping;
  gfc_formal_arglist *formal;
  gfc_actual_arglist *arg;
  gfc_se tse;

  if (expr->ts.u.cl->length
      && gfc_is_constant_expr (expr->ts.u.cl->length))
    {
      if (!expr->ts.u.cl->backend_decl)
        gfc_conv_string_length (expr->ts.u.cl, expr, &se->pre);
      return;
    }

  switch (expr->expr_type)
    {
    case EXPR_OP:
      get_array_charlen (expr->value.op.op1, se);

      /* For parentheses the expression ts.u.cl is identical.  */
      if (expr->value.op.op == INTRINSIC_PARENTHESES)
        return;

      expr->ts.u.cl->backend_decl =
        gfc_create_var (gfc_charlen_type_node, "sln");

      if (expr->value.op.op2)
        {
          get_array_charlen (expr->value.op.op2, se);

          gcc_assert (expr->value.op.op == INTRINSIC_CONCAT);

          /* Add the string lengths and assign them to the expression
             string length backend declaration.  */
          gfc_add_modify (&se->pre, expr->ts.u.cl->backend_decl,
                          fold_build2 (PLUS_EXPR, gfc_charlen_type_node,
                                       expr->value.op.op1->ts.u.cl->backend_decl,
                                       expr->value.op.op2->ts.u.cl->backend_decl));
        }
      else
        gfc_add_modify (&se->pre, expr->ts.u.cl->backend_decl,
                        expr->value.op.op1->ts.u.cl->backend_decl);
      break;

    case EXPR_FUNCTION:
      if (expr->value.function.esym == NULL
          || expr->ts.u.cl->length->expr_type == EXPR_CONSTANT)
        {
          gfc_conv_string_length (expr->ts.u.cl, expr, &se->pre);
          break;
        }

      /* Map expressions involving the dummy arguments onto the actual
         argument expressions.  */
      gfc_init_interface_mapping (&mapping);
      formal = expr->symtree->n.sym->formal;
      arg = expr->value.function.actual;

      /* Set se = NULL in the calls to the interface mapping, to suppress any
         backend stuff.  */
      for (; arg != NULL; arg = arg->next, formal = formal ? formal->next : NULL)
        {
          if (!arg->expr)
            continue;
          if (formal->sym)
            gfc_add_interface_mapping (&mapping, formal->sym, NULL, arg->expr);
        }

      gfc_init_se (&tse, NULL);

      /* Build the expression for the character length and convert it.  */
      gfc_apply_interface_mapping (&mapping, &tse, expr->ts.u.cl->length);

      gfc_add_block_to_block (&se->pre, &tse.pre);
      gfc_add_block_to_block (&se->post, &tse.post);
      tse.expr = fold_convert (gfc_charlen_type_node, tse.expr);
      tse.expr = fold_build2 (MAX_EXPR, gfc_charlen_type_node, tse.expr,
                              build_int_cst (gfc_charlen_type_node, 0));
      expr->ts.u.cl->backend_decl = tse.expr;
      gfc_free_interface_mapping (&mapping);
      break;

    default:
      gfc_conv_string_length (expr->ts.u.cl, expr, &se->pre);
      break;
    }
}



/* Convert an array for passing as an actual argument.  Expressions and
   vector subscripts are evaluated and stored in a temporary, which is then
   passed.  For whole arrays the descriptor is passed.  For array sections
   a modified copy of the descriptor is passed, but using the original data.

   This function is also used for array pointer assignments, and there
   are three cases:

   - se->want_pointer && !se->direct_byref
        EXPR is an actual argument.  On exit, se->expr contains a
        pointer to the array descriptor.

   - !se->want_pointer && !se->direct_byref
        EXPR is an actual argument to an intrinsic function or the
        left-hand side of a pointer assignment.  On exit, se->expr
        contains the descriptor for EXPR.

   - !se->want_pointer && se->direct_byref
        EXPR is the right-hand side of a pointer assignment and
        se->expr is the descriptor for the previously-evaluated
        left-hand side.  The function creates an assignment from
        EXPR to se->expr.  */

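/* For illustration, the third case above corresponds to a pointer assignment
   such as

       p => a(1:n:2)

   where the descriptor for p, already held in se->expr, is filled in to
   describe the section of a without copying any data.  */
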
void
gfc_conv_expr_descriptor (gfc_se * se, gfc_expr * expr, gfc_ss * ss)
{
  gfc_loopinfo loop;
  gfc_ss *secss;
  gfc_ss_info *info;
  int need_tmp;
  int n;
  tree tmp;
  tree desc;
  stmtblock_t block;
  tree start;
  tree offset;
  int full;
  bool subref_array_target = false;

  gcc_assert (ss != gfc_ss_terminator);

  /* Special case things we know we can pass easily.  */
  switch (expr->expr_type)
    {
    case EXPR_VARIABLE:
      /* If we have a linear array section, we can pass it directly.
         Otherwise we need to copy it into a temporary.  */

      /* Find the SS for the array section.  */
      secss = ss;
      while (secss != gfc_ss_terminator && secss->type != GFC_SS_SECTION)
        secss = secss->next;

      gcc_assert (secss != gfc_ss_terminator);
      info = &secss->data.info;

      /* Get the descriptor for the array.  */
      gfc_conv_ss_descriptor (&se->pre, secss, 0);
      desc = info->descriptor;

      subref_array_target = se->direct_byref && is_subref_array (expr);
      need_tmp = gfc_ref_needs_temporary_p (expr->ref)
                 && !subref_array_target;

      if (need_tmp)
        full = 0;
      else if (GFC_ARRAY_TYPE_P (TREE_TYPE (desc)))
        {
          /* Create a new descriptor if the array doesn't have one.  */
          full = 0;
        }
      else if (info->ref->u.ar.type == AR_FULL)
        full = 1;
      else if (se->direct_byref)
        full = 0;
      else
        full = gfc_full_array_ref_p (info->ref, NULL);

      if (full)
        {
          if (se->direct_byref)
            {
              /* Copy the descriptor for pointer assignments.  */
              gfc_add_modify (&se->pre, se->expr, desc);

              /* Add any offsets from subreferences.  */
              gfc_get_dataptr_offset (&se->pre, se->expr, desc, NULL_TREE,
                                      subref_array_target, expr);
            }
          else if (se->want_pointer)
            {
              /* We pass full arrays directly.  This means that pointers and
                 allocatable arrays should also work.  */
              se->expr = gfc_build_addr_expr (NULL_TREE, desc);
            }
          else
            {
              se->expr = desc;
            }

          if (expr->ts.type == BT_CHARACTER)
            se->string_length = gfc_get_expr_charlen (expr);

          return;
        }
      break;

    case EXPR_FUNCTION:
      /* A transformational function return value will be a temporary
         array descriptor.  We still need to go through the scalarizer
         to create the descriptor.  Elemental functions are handled as
         arbitrary expressions, i.e. copy to a temporary.  */
      secss = ss;
      /* Look for the SS for this function.  */
      while (secss != gfc_ss_terminator
             && (secss->type != GFC_SS_FUNCTION || secss->expr != expr))
        secss = secss->next;

      if (se->direct_byref)
        {
          gcc_assert (secss != gfc_ss_terminator);

          /* For pointer assignments pass the descriptor directly.  */
          se->ss = secss;
          se->expr = gfc_build_addr_expr (NULL_TREE, se->expr);
          gfc_conv_expr (se, expr);
          return;
        }

      if (secss == gfc_ss_terminator)
        {
          /* Elemental function.  */
          need_tmp = 1;
          if (expr->ts.type == BT_CHARACTER
              && expr->ts.u.cl->length->expr_type != EXPR_CONSTANT)
            get_array_charlen (expr, se);

          info = NULL;
        }
      else
        {
          /* Transformational function.  */
          info = &secss->data.info;
          need_tmp = 0;
        }
      break;

    case EXPR_ARRAY:
      /* Constant array constructors don't need a temporary.  */
      if (ss->type == GFC_SS_CONSTRUCTOR
          && expr->ts.type != BT_CHARACTER
          && gfc_constant_array_constructor_p (expr->value.constructor))
        {
          need_tmp = 0;
          info = &ss->data.info;
          secss = ss;
        }
      else
        {
          need_tmp = 1;
          secss = NULL;
          info = NULL;
        }
      break;

    default:
      /* Something complicated.  Copy it into a temporary.  */
      need_tmp = 1;
      secss = NULL;
      info = NULL;
      break;
    }

  gfc_init_loopinfo (&loop);

  /* Associate the SS with the loop.  */
  gfc_add_ss_to_loop (&loop, ss);

  /* Tell the scalarizer not to bother creating loop variables, etc.  */
  if (!need_tmp)
    loop.array_parameter = 1;
  else
    /* The right-hand side of a pointer assignment mustn't use a temporary.  */
    gcc_assert (!se->direct_byref);

  /* Setup the scalarizing loops and bounds.  */
  gfc_conv_ss_startstride (&loop);

  if (need_tmp)
    {
      /* Tell the scalarizer to make a temporary.  */
      loop.temp_ss = gfc_get_ss ();
      loop.temp_ss->type = GFC_SS_TEMP;
      loop.temp_ss->next = gfc_ss_terminator;

      if (expr->ts.type == BT_CHARACTER
          && !expr->ts.u.cl->backend_decl)
        get_array_charlen (expr, se);

      loop.temp_ss->data.temp.type = gfc_typenode_for_spec (&expr->ts);

      if (expr->ts.type == BT_CHARACTER)
        loop.temp_ss->string_length = expr->ts.u.cl->backend_decl;
      else
        loop.temp_ss->string_length = NULL;

      se->string_length = loop.temp_ss->string_length;
      loop.temp_ss->data.temp.dimen = loop.dimen;
      gfc_add_ss_to_loop (&loop, loop.temp_ss);
    }

  gfc_conv_loop_setup (&loop, & expr->where);

  if (need_tmp)
    {
      /* Copy into a temporary and pass that.  We don't need to copy the data
         back because expressions and vector subscripts must be INTENT_IN.  */
      /* TODO: Optimize passing function return values.  */
      gfc_se lse;
      gfc_se rse;

      /* Start the copying loops.  */
      gfc_mark_ss_chain_used (loop.temp_ss, 1);
      gfc_mark_ss_chain_used (ss, 1);
      gfc_start_scalarized_body (&loop, &block);

      /* Copy each data element.  */
      gfc_init_se (&lse, NULL);
      gfc_copy_loopinfo_to_se (&lse, &loop);
      gfc_init_se (&rse, NULL);
      gfc_copy_loopinfo_to_se (&rse, &loop);

      lse.ss = loop.temp_ss;
      rse.ss = ss;

      gfc_conv_scalarized_array_ref (&lse, NULL);
      if (expr->ts.type == BT_CHARACTER)
        {
          gfc_conv_expr (&rse, expr);
          if (POINTER_TYPE_P (TREE_TYPE (rse.expr)))
            rse.expr = build_fold_indirect_ref_loc (input_location,
                                                    rse.expr);
        }
      else
        gfc_conv_expr_val (&rse, expr);

      gfc_add_block_to_block (&block, &rse.pre);
      gfc_add_block_to_block (&block, &lse.pre);

      lse.string_length = rse.string_length;
      tmp = gfc_trans_scalar_assign (&lse, &rse, expr->ts, true,
                                     expr->expr_type == EXPR_VARIABLE);
      gfc_add_expr_to_block (&block, tmp);

      /* Finish the copying loops.  */
      gfc_trans_scalarizing_loops (&loop, &block);

      desc = loop.temp_ss->data.info.descriptor;

      gcc_assert (is_gimple_lvalue (desc));
    }
  else if (expr->expr_type == EXPR_FUNCTION)
    {
      desc = info->descriptor;
      se->string_length = ss->string_length;
    }
  else
    {
      /* We pass sections without copying to a temporary.  Make a new
         descriptor and point it at the section we want.  The loop variable
         limits will be the limits of the section.
         A function may decide to repack the array to speed up access, but
         we're not bothered about that here.  */
      int dim, ndim;
      tree parm;
      tree parmtype;
      tree stride;
      tree from;
      tree to;
      tree base;

      /* Set the string_length for a character array.  */
      if (expr->ts.type == BT_CHARACTER)
        se->string_length = gfc_get_expr_charlen (expr);

      desc = info->descriptor;
      gcc_assert (secss && secss != gfc_ss_terminator);
      if (se->direct_byref)
        {
          /* For pointer assignments we fill in the destination.  */
          parm = se->expr;
          parmtype = TREE_TYPE (parm);
        }
      else
        {
          /* Otherwise make a new one.  */
          parmtype = gfc_get_element_type (TREE_TYPE (desc));
          parmtype = gfc_get_array_type_bounds (parmtype, loop.dimen,
                                                loop.from, loop.to, 0,
                                                GFC_ARRAY_UNKNOWN, false);
          parm = gfc_create_var (parmtype, "parm");
        }

      offset = gfc_index_zero_node;
      dim = 0;

      /* The following can be somewhat confusing.  We have two
         descriptors, a new one and the original array.
         {parm, parmtype, dim} refer to the new one.
         {desc, type, n, secss, loop} refer to the original, which may be
         a descriptorless array.
         The bounds of the scalarization are the bounds of the section.
         We don't have to worry about numeric overflows when calculating
         the offsets because all elements are within the array data.  */

      /* Set the dtype.  */
      tmp = gfc_conv_descriptor_dtype (parm);
      gfc_add_modify (&loop.pre, tmp, gfc_get_dtype (parmtype));

      /* Set offset for assignments to pointer only to zero if it is not
         the full array.  */
      if (se->direct_byref
          && info->ref && info->ref->u.ar.type != AR_FULL)
        base = gfc_index_zero_node;
      else if (GFC_ARRAY_TYPE_P (TREE_TYPE (desc)))
        base = gfc_evaluate_now (gfc_conv_array_offset (desc), &loop.pre);
      else
        base = NULL_TREE;

      ndim = info->ref ? info->ref->u.ar.dimen : info->dimen;
      for (n = 0; n < ndim; n++)
        {
          stride = gfc_conv_array_stride (desc, n);

          /* Work out the offset.  */
          if (info->ref
              && info->ref->u.ar.dimen_type[n] == DIMEN_ELEMENT)
            {
              gcc_assert (info->subscript[n]
                          && info->subscript[n]->type == GFC_SS_SCALAR);
              start = info->subscript[n]->data.scalar.expr;
            }
          else
            {
              /* Check we haven't somehow got out of sync.  */
              gcc_assert (info->dim[dim] == n);

              /* Evaluate and remember the start of the section.  */
              start = info->start[dim];
              stride = gfc_evaluate_now (stride, &loop.pre);
            }

          tmp = gfc_conv_array_lbound (desc, n);
          tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (tmp), start, tmp);

          tmp = fold_build2 (MULT_EXPR, TREE_TYPE (tmp), tmp, stride);
          offset = fold_build2 (PLUS_EXPR, TREE_TYPE (tmp), offset, tmp);

          if (info->ref
              && info->ref->u.ar.dimen_type[n] == DIMEN_ELEMENT)
            {
              /* For elemental dimensions, we only need the offset.  */
              continue;
            }

          /* Vector subscripts need copying and are handled elsewhere.  */
          if (info->ref)
            gcc_assert (info->ref->u.ar.dimen_type[n] == DIMEN_RANGE);

          /* Set the new lower bound.  */
          from = loop.from[dim];
          to = loop.to[dim];

          /* If we have an array section or are assigning make sure that
             the lower bound is 1.  References to the full
             array should otherwise keep the original bounds.  */
          if ((!info->ref
               || info->ref->u.ar.type != AR_FULL)
              && !integer_onep (from))
            {
              tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type,
                                 gfc_index_one_node, from);
              to = fold_build2 (PLUS_EXPR, gfc_array_index_type, to, tmp);
              from = gfc_index_one_node;
            }
          gfc_conv_descriptor_lbound_set (&loop.pre, parm,
                                          gfc_rank_cst[dim], from);

          /* Set the new upper bound.  */
          gfc_conv_descriptor_ubound_set (&loop.pre, parm,
                                          gfc_rank_cst[dim], to);

          /* Multiply the stride by the section stride to get the
             total stride.  */
          stride = fold_build2 (MULT_EXPR, gfc_array_index_type,
                                stride, info->stride[dim]);

          if (se->direct_byref
              && info->ref
              && info->ref->u.ar.type != AR_FULL)
            {
              base = fold_build2 (MINUS_EXPR, TREE_TYPE (base),
                                  base, stride);
            }
          else if (GFC_ARRAY_TYPE_P (TREE_TYPE (desc)))
            {
              tmp = gfc_conv_array_lbound (desc, n);
              tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (base),
                                 tmp, loop.from[dim]);
              tmp = fold_build2 (MULT_EXPR, TREE_TYPE (base),
                                 tmp, gfc_conv_array_stride (desc, n));
              base = fold_build2 (PLUS_EXPR, TREE_TYPE (base),
                                  tmp, base);
            }

          /* Store the new stride.  */
          gfc_conv_descriptor_stride_set (&loop.pre, parm,
                                          gfc_rank_cst[dim], stride);

          dim++;
        }

      if (se->data_not_needed)
        gfc_conv_descriptor_data_set (&loop.pre, parm,
                                      gfc_index_zero_node);
      else
        /* Point the data pointer at the 1st element in the section.  */
        gfc_get_dataptr_offset (&loop.pre, parm, desc, offset,
                                subref_array_target, expr);

      if ((se->direct_byref || GFC_ARRAY_TYPE_P (TREE_TYPE (desc)))
          && !se->data_not_needed)
        {
          /* Set the offset.  */
          gfc_conv_descriptor_offset_set (&loop.pre, parm, base);
        }
      else
        {
          /* Only the callee knows what the correct offset is, so just set
             it to zero here.  */
          gfc_conv_descriptor_offset_set (&loop.pre, parm, gfc_index_zero_node);
        }
      desc = parm;
    }

  if (!se->direct_byref)
    {
      /* Get a pointer to the new descriptor.  */
      if (se->want_pointer)
        se->expr = gfc_build_addr_expr (NULL_TREE, desc);
      else
        se->expr = desc;
    }

  gfc_add_block_to_block (&se->pre, &loop.pre);
  gfc_add_block_to_block (&se->post, &loop.post);

  /* Cleanup the scalarizer.  */
  gfc_cleanup_loop (&loop);
}

/* Helper function for gfc_conv_array_parameter if array size needs to be
   computed.  */

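/* A sketch of the computation done below for the rank-1 descriptor case:

       size_in_bytes = MAX (ubound - lbound + 1, 0) * element_size

   For rank > 1 the library function size0 is called on the descriptor
   instead, and for descriptorless arrays the size stored in the type is
   used directly.  */
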
static void
array_parameter_size (tree desc, gfc_expr *expr, tree *size)
{
  tree elem;
  if (GFC_ARRAY_TYPE_P (TREE_TYPE (desc)))
    *size = GFC_TYPE_ARRAY_SIZE (TREE_TYPE (desc));
  else if (expr->rank > 1)
    *size = build_call_expr_loc (input_location,
                                 gfor_fndecl_size0, 1,
                                 gfc_build_addr_expr (NULL, desc));
  else
    {
      tree ubound = gfc_conv_descriptor_ubound_get (desc, gfc_index_zero_node);
      tree lbound = gfc_conv_descriptor_lbound_get (desc, gfc_index_zero_node);

      *size = fold_build2 (MINUS_EXPR, gfc_array_index_type, ubound, lbound);
      *size = fold_build2 (PLUS_EXPR, gfc_array_index_type, *size,
                           gfc_index_one_node);
      *size = fold_build2 (MAX_EXPR, gfc_array_index_type, *size,
                           gfc_index_zero_node);
    }
  elem = TYPE_SIZE_UNIT (gfc_get_element_type (TREE_TYPE (desc)));
  *size = fold_build2 (MULT_EXPR, gfc_array_index_type, *size,
                       fold_convert (gfc_array_index_type, elem));
}

/* Convert an array for passing as an actual parameter.  */
/* TODO: Optimize passing g77 arrays.  */

void
gfc_conv_array_parameter (gfc_se * se, gfc_expr * expr, gfc_ss * ss, bool g77,
                          const gfc_symbol *fsym, const char *proc_name,
                          tree *size)
{
  tree ptr;
  tree desc;
  tree tmp = NULL_TREE;
  tree stmt;
  tree parent = DECL_CONTEXT (current_function_decl);
  bool full_array_var;
  bool this_array_result;
  bool contiguous;
  bool no_pack;
  bool array_constructor;
  bool good_allocatable;
  bool ultimate_ptr_comp;
  bool ultimate_alloc_comp;
  gfc_symbol *sym;
  stmtblock_t block;
  gfc_ref *ref;

  ultimate_ptr_comp = false;
  ultimate_alloc_comp = false;
  for (ref = expr->ref; ref; ref = ref->next)
    {
      if (ref->next == NULL)
        break;

      if (ref->type == REF_COMPONENT)
        {
          ultimate_ptr_comp = ref->u.c.component->attr.pointer;
          ultimate_alloc_comp = ref->u.c.component->attr.allocatable;
        }
    }

  full_array_var = false;
  contiguous = false;

  if (expr->expr_type == EXPR_VARIABLE && ref && !ultimate_ptr_comp)
    full_array_var = gfc_full_array_ref_p (ref, &contiguous);

  sym = full_array_var ? expr->symtree->n.sym : NULL;

  /* The symbol should have an array specification.  */
  gcc_assert (!sym || sym->as || ref->u.ar.as);

  if (expr->expr_type == EXPR_ARRAY && expr->ts.type == BT_CHARACTER)
    {
      get_array_ctor_strlen (&se->pre, expr->value.constructor, &tmp);
      expr->ts.u.cl->backend_decl = tmp;
      se->string_length = tmp;
    }

  /* Is this the result of the enclosing procedure?  */
  this_array_result = (full_array_var && sym->attr.flavor == FL_PROCEDURE);
  if (this_array_result
      && (sym->backend_decl != current_function_decl)
      && (sym->backend_decl != parent))
    this_array_result = false;

  /* Pass the address of the array if it is not a pointer or assumed-shape
     array.  */
  if (full_array_var && g77 && !this_array_result)
    {
      tmp = gfc_get_symbol_decl (sym);

      if (sym->ts.type == BT_CHARACTER)
        se->string_length = sym->ts.u.cl->backend_decl;

      if (sym->ts.type == BT_DERIVED)
        {
          gfc_conv_expr_descriptor (se, expr, ss);
          se->expr = gfc_conv_array_data (se->expr);
          return;
        }

      if (!sym->attr.pointer
          && sym->as
          && sym->as->type != AS_ASSUMED_SHAPE
          && !sym->attr.allocatable)
        {
          /* Some variables are declared directly, others are declared as
             pointers and allocated on the heap.  */
          if (sym->attr.dummy || POINTER_TYPE_P (TREE_TYPE (tmp)))
            se->expr = tmp;
          else
            se->expr = gfc_build_addr_expr (NULL_TREE, tmp);
          if (size)
            array_parameter_size (tmp, expr, size);
          return;
        }

      if (sym->attr.allocatable)
        {
          if (sym->attr.dummy || sym->attr.result)
            {
              gfc_conv_expr_descriptor (se, expr, ss);
              tmp = se->expr;
            }
          if (size)
            array_parameter_size (tmp, expr, size);
          se->expr = gfc_conv_array_data (tmp);
          return;
        }
    }

  /* A convenient reduction in scope.  */
  contiguous = g77 && !this_array_result && contiguous;

  /* There is no need to pack and unpack the array, if it is contiguous
     and not deferred or assumed shape.  */
  no_pack = ((sym && sym->as
              && !sym->attr.pointer
              && sym->as->type != AS_DEFERRED
              && sym->as->type != AS_ASSUMED_SHAPE)
             ||
             (ref && ref->u.ar.as
              && ref->u.ar.as->type != AS_DEFERRED
              && ref->u.ar.as->type != AS_ASSUMED_SHAPE));

  no_pack = contiguous && no_pack;

  /* Array constructors are always contiguous and do not need packing.  */
  array_constructor = g77 && !this_array_result && expr->expr_type == EXPR_ARRAY;

  /* Same is true of contiguous sections from allocatable variables.  */
  good_allocatable = contiguous
                     && expr->symtree
                     && expr->symtree->n.sym->attr.allocatable;

  /* Or ultimate allocatable components.  */
  ultimate_alloc_comp = contiguous && ultimate_alloc_comp;

  if (no_pack || array_constructor || good_allocatable || ultimate_alloc_comp)
    {
      gfc_conv_expr_descriptor (se, expr, ss);
      if (expr->ts.type == BT_CHARACTER)
        se->string_length = expr->ts.u.cl->backend_decl;
      if (size)
        array_parameter_size (se->expr, expr, size);
      se->expr = gfc_conv_array_data (se->expr);
      return;
    }

  if (this_array_result)
    {
      /* Result of the enclosing function.  */
      gfc_conv_expr_descriptor (se, expr, ss);
      if (size)
        array_parameter_size (se->expr, expr, size);
      se->expr = gfc_build_addr_expr (NULL_TREE, se->expr);

      if (g77 && TREE_TYPE (TREE_TYPE (se->expr)) != NULL_TREE
          && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (TREE_TYPE (se->expr))))
        se->expr = gfc_conv_array_data (build_fold_indirect_ref_loc (input_location,
                                                                     se->expr));

      return;
    }
  else
    {
      /* Every other type of array.  */
      se->want_pointer = 1;
      gfc_conv_expr_descriptor (se, expr, ss);
      if (size)
        array_parameter_size (build_fold_indirect_ref_loc (input_location,
                                                           se->expr),
                              expr, size);
    }

  /* Deallocate the allocatable components of structures that are
     not variable.  */
  if (expr->ts.type == BT_DERIVED
      && expr->ts.u.derived->attr.alloc_comp
      && expr->expr_type != EXPR_VARIABLE)
    {
      tmp = build_fold_indirect_ref_loc (input_location, se->expr);
      tmp = gfc_deallocate_alloc_comp (expr->ts.u.derived, tmp, expr->rank);
      gfc_add_expr_to_block (&se->post, tmp);
    }

  if (g77)
    {
      desc = se->expr;
      /* Repack the array.  */
      if (gfc_option.warn_array_temp)
        {
          if (fsym)
            gfc_warning ("Creating array temporary at %L for argument '%s'",
                         &expr->where, fsym->name);
          else
            gfc_warning ("Creating array temporary at %L", &expr->where);
        }

      ptr = build_call_expr_loc (input_location,
                                 gfor_fndecl_in_pack, 1, desc);

      if (fsym && fsym->attr.optional && sym && sym->attr.optional)
        {
          tmp = gfc_conv_expr_present (sym);
          ptr = build3 (COND_EXPR, TREE_TYPE (se->expr), tmp,
                        fold_convert (TREE_TYPE (se->expr), ptr),
                        fold_convert (TREE_TYPE (se->expr), null_pointer_node));
        }

      ptr = gfc_evaluate_now (ptr, &se->pre);

      se->expr = ptr;

      if (gfc_option.rtcheck & GFC_RTCHECK_ARRAY_TEMPS)
        {
          char * msg;

          if (fsym && proc_name)
            asprintf (&msg, "An array temporary was created for argument "
                      "'%s' of procedure '%s'", fsym->name, proc_name);
          else
            asprintf (&msg, "An array temporary was created");

          tmp = build_fold_indirect_ref_loc (input_location, desc);
          tmp = gfc_conv_array_data (tmp);
          tmp = fold_build2 (NE_EXPR, boolean_type_node,
                             fold_convert (TREE_TYPE (tmp), ptr), tmp);

          if (fsym && fsym->attr.optional && sym && sym->attr.optional)
            tmp = fold_build2 (TRUTH_AND_EXPR, boolean_type_node,
                               gfc_conv_expr_present (sym), tmp);

          gfc_trans_runtime_check (false, true, tmp, &se->pre,
                                   &expr->where, msg);
          gfc_free (msg);
        }

      gfc_start_block (&block);

      /* Copy the data back.  */
      if (fsym == NULL || fsym->attr.intent != INTENT_IN)
        {
          tmp = build_call_expr_loc (input_location,
                                     gfor_fndecl_in_unpack, 2, desc, ptr);
          gfc_add_expr_to_block (&block, tmp);
        }

      /* Free the temporary.  */
      tmp = gfc_call_free (convert (pvoid_type_node, ptr));
      gfc_add_expr_to_block (&block, tmp);

      stmt = gfc_finish_block (&block);

      gfc_init_block (&block);
      /* Only if it was repacked.  This code needs to be executed before the
         loop cleanup code.  */
      tmp = build_fold_indirect_ref_loc (input_location, desc);
      tmp = gfc_conv_array_data (tmp);
      tmp = fold_build2 (NE_EXPR, boolean_type_node,
                         fold_convert (TREE_TYPE (tmp), ptr), tmp);

      if (fsym && fsym->attr.optional && sym && sym->attr.optional)
        tmp = fold_build2 (TRUTH_AND_EXPR, boolean_type_node,
                           gfc_conv_expr_present (sym), tmp);

      tmp = build3_v (COND_EXPR, tmp, stmt, build_empty_stmt (input_location));

      gfc_add_expr_to_block (&block, tmp);
      gfc_add_block_to_block (&block, &se->post);

      gfc_init_block (&se->post);
      gfc_add_block_to_block (&se->post, &block);
    }
}


/* Generate code to deallocate an array, if it is allocated.  */

tree
gfc_trans_dealloc_allocated (tree descriptor)
{
  tree tmp;
  tree var;
  stmtblock_t block;

  gfc_start_block (&block);

  var = gfc_conv_descriptor_data_get (descriptor);
  STRIP_NOPS (var);

  /* Call array_deallocate with an int * present in the second argument.
     Although it is ignored here, its presence ensures that arrays that
     are already deallocated are ignored.  */
  tmp = gfc_deallocate_with_status (var, NULL_TREE, true, NULL);
  gfc_add_expr_to_block (&block, tmp);

  /* Zero the data pointer.  */
  tmp = fold_build2 (MODIFY_EXPR, void_type_node,
                     var, build_int_cst (TREE_TYPE (var), 0));
  gfc_add_expr_to_block (&block, tmp);

  return gfc_finish_block (&block);
}


/* This helper function calculates the size in words of a full array.  */

static tree
get_full_array_size (stmtblock_t *block, tree decl, int rank)
{
  tree idx;
  tree nelems;
  tree tmp;
  idx = gfc_rank_cst[rank - 1];
  nelems = gfc_conv_descriptor_ubound_get (decl, idx);
  tmp = gfc_conv_descriptor_lbound_get (decl, idx);
  tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type, nelems, tmp);
  tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
                     tmp, gfc_index_one_node);
  tmp = gfc_evaluate_now (tmp, block);

  nelems = gfc_conv_descriptor_stride_get (decl, idx);
  tmp = fold_build2 (MULT_EXPR, gfc_array_index_type, nelems, tmp);
  return gfc_evaluate_now (tmp, block);
}


/* Allocate dest to the same size as src, and copy src -> dest.
   If no_malloc is set, only the copy is done.  */

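/* Roughly, the code generated below behaves like

       if (src == NULL)
         dest = NULL;
       else
         {
           if (!no_malloc)
             dest = malloc (size);
           memcpy (dest, src, size);
         }

   with the descriptor data pointers standing in for dest and src when
   rank != 0.  */
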
static tree
duplicate_allocatable(tree dest, tree src, tree type, int rank,
                      bool no_malloc)
{
  tree tmp;
  tree size;
  tree nelems;
  tree null_cond;
  tree null_data;
  stmtblock_t block;

  /* If the source is null, set the destination to null.  Then,
     allocate memory to the destination.  */
  gfc_init_block (&block);

  if (rank == 0)
    {
      tmp = null_pointer_node;
      tmp = fold_build2 (MODIFY_EXPR, type, dest, tmp);
      gfc_add_expr_to_block (&block, tmp);
      null_data = gfc_finish_block (&block);

      gfc_init_block (&block);
      size = TYPE_SIZE_UNIT (type);
      if (!no_malloc)
        {
          tmp = gfc_call_malloc (&block, type, size);
          tmp = fold_build2 (MODIFY_EXPR, void_type_node, dest,
                             fold_convert (type, tmp));
          gfc_add_expr_to_block (&block, tmp);
        }

      tmp = built_in_decls[BUILT_IN_MEMCPY];
      tmp = build_call_expr_loc (input_location, tmp, 3,
                                 dest, src, size);
    }
  else
    {
      gfc_conv_descriptor_data_set (&block, dest, null_pointer_node);
      null_data = gfc_finish_block (&block);

      gfc_init_block (&block);
      nelems = get_full_array_size (&block, src, rank);
      tmp = fold_convert (gfc_array_index_type,
                          TYPE_SIZE_UNIT (gfc_get_element_type (type)));
      size = fold_build2 (MULT_EXPR, gfc_array_index_type, nelems, tmp);
      if (!no_malloc)
        {
          tmp = TREE_TYPE (gfc_conv_descriptor_data_get (src));
          tmp = gfc_call_malloc (&block, tmp, size);
          gfc_conv_descriptor_data_set (&block, dest, tmp);
        }

      /* We know the temporary and the value will be the same length,
         so can use memcpy.  */
      tmp = built_in_decls[BUILT_IN_MEMCPY];
      tmp = build_call_expr_loc (input_location,
                                 tmp, 3, gfc_conv_descriptor_data_get (dest),
                                 gfc_conv_descriptor_data_get (src), size);
    }

  gfc_add_expr_to_block (&block, tmp);
  tmp = gfc_finish_block (&block);

  /* Null the destination if the source is null; otherwise do
     the allocate and copy.  */
  if (rank == 0)
    null_cond = src;
  else
    null_cond = gfc_conv_descriptor_data_get (src);

  null_cond = convert (pvoid_type_node, null_cond);
  null_cond = fold_build2 (NE_EXPR, boolean_type_node,
                           null_cond, null_pointer_node);
  return build3_v (COND_EXPR, null_cond, tmp, null_data);
}


/* Allocate dest to the same size as src, and copy data src -> dest.  */

tree
gfc_duplicate_allocatable (tree dest, tree src, tree type, int rank)
{
  return duplicate_allocatable(dest, src, type, rank, false);
}


/* Copy data src -> dest.  */

tree
gfc_copy_allocatable_data (tree dest, tree src, tree type, int rank)
{
  return duplicate_allocatable(dest, src, type, rank, true);
}
|
5895 |
|
|
|
5896 |
|
|
|
5897 |
|
|
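/* The only difference between the two entry points above is the NO_MALLOC
   flag: gfc_copy_allocatable_data assumes DEST already refers to allocated
   storage of sufficient size and only copies the payload, whereas
   gfc_duplicate_allocatable allocates DEST first.  */
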
/* Recursively traverse an object of derived type, generating code to
   deallocate, nullify or copy allocatable components.  This is the
   workhorse function for the functions named in this enum.  */

enum {DEALLOCATE_ALLOC_COMP = 1, NULLIFY_ALLOC_COMP, COPY_ALLOC_COMP,
      COPY_ONLY_ALLOC_COMP};

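/* Each PURPOSE value corresponds to one of the public wrappers defined
   after structure_alloc_comps: DEALLOCATE_ALLOC_COMP is used by
   gfc_deallocate_alloc_comp, NULLIFY_ALLOC_COMP by gfc_nullify_alloc_comp,
   COPY_ALLOC_COMP by gfc_copy_alloc_comp, and COPY_ONLY_ALLOC_COMP by
   gfc_copy_only_alloc_comp.  */
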
static tree
structure_alloc_comps (gfc_symbol * der_type, tree decl,
                       tree dest, int rank, int purpose)
{
  gfc_component *c;
  gfc_loopinfo loop;
  stmtblock_t fnblock;
  stmtblock_t loopbody;
  tree tmp;
  tree comp;
  tree dcmp;
  tree nelems;
  tree index;
  tree var;
  tree cdecl;
  tree ctype;
  tree vref, dref;
  tree null_cond = NULL_TREE;

  gfc_init_block (&fnblock);

  if (POINTER_TYPE_P (TREE_TYPE (decl)) && rank != 0)
    decl = build_fold_indirect_ref_loc (input_location, decl);

  /* If this is an array of derived types with allocatable components
     build a loop and recursively call this function.  */
  if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
      || GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl)))
    {
      tmp = gfc_conv_array_data (decl);
      var = build_fold_indirect_ref_loc (input_location, tmp);

      /* Get the number of elements - 1 and set the counter.  */
      if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl)))
        {
          /* Use the descriptor for an allocatable array.  Since this
             is a full array reference, we only need the descriptor
             information from dimension = rank.  */
          tmp = get_full_array_size (&fnblock, decl, rank);
          tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type,
                             tmp, gfc_index_one_node);

          null_cond = gfc_conv_descriptor_data_get (decl);
          null_cond = fold_build2 (NE_EXPR, boolean_type_node, null_cond,
                                   build_int_cst (TREE_TYPE (null_cond), 0));
        }
      else
        {
          /* Otherwise use the TYPE_DOMAIN information.  */
          tmp = array_type_nelts (TREE_TYPE (decl));
          tmp = fold_convert (gfc_array_index_type, tmp);
        }

      /* Remember that this is, in fact, the no. of elements - 1.  */
      nelems = gfc_evaluate_now (tmp, &fnblock);
      index = gfc_create_var (gfc_array_index_type, "S");

      /* Build the body of the loop.  */
      gfc_init_block (&loopbody);

      vref = gfc_build_array_ref (var, index, NULL);

      if (purpose == COPY_ALLOC_COMP)
        {
          if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (dest)))
            {
              tmp = gfc_duplicate_allocatable (dest, decl, TREE_TYPE(decl), rank);
              gfc_add_expr_to_block (&fnblock, tmp);
            }
          tmp = build_fold_indirect_ref_loc (input_location,
                                             gfc_conv_array_data (dest));
          dref = gfc_build_array_ref (tmp, index, NULL);
          tmp = structure_alloc_comps (der_type, vref, dref, rank, purpose);
        }
      else if (purpose == COPY_ONLY_ALLOC_COMP)
        {
          tmp = build_fold_indirect_ref_loc (input_location,
                                             gfc_conv_array_data (dest));
          dref = gfc_build_array_ref (tmp, index, NULL);
          tmp = structure_alloc_comps (der_type, vref, dref, rank,
                                       COPY_ALLOC_COMP);
        }
      else
        tmp = structure_alloc_comps (der_type, vref, NULL_TREE, rank, purpose);

      gfc_add_expr_to_block (&loopbody, tmp);

      /* Build the loop and return.  */
      gfc_init_loopinfo (&loop);
      loop.dimen = 1;
      loop.from[0] = gfc_index_zero_node;
      loop.loopvar[0] = index;
      loop.to[0] = nelems;
      gfc_trans_scalarizing_loops (&loop, &loopbody);
      gfc_add_block_to_block (&fnblock, &loop.pre);

      tmp = gfc_finish_block (&fnblock);
      if (null_cond != NULL_TREE)
        tmp = build3_v (COND_EXPR, null_cond, tmp,
                        build_empty_stmt (input_location));

      return tmp;
    }

  /* Otherwise, act on the components or recursively call self to
     act on a chain of components.  */
  for (c = der_type->components; c; c = c->next)
    {
      bool cmp_has_alloc_comps = (c->ts.type == BT_DERIVED)
                                  && c->ts.u.derived->attr.alloc_comp;
      cdecl = c->backend_decl;
      ctype = TREE_TYPE (cdecl);

      switch (purpose)
        {
        case DEALLOCATE_ALLOC_COMP:
          /* Do not deallocate the components of ultimate pointer
             components.  */
          if (cmp_has_alloc_comps && !c->attr.pointer)
            {
              comp = fold_build3 (COMPONENT_REF, ctype,
                                  decl, cdecl, NULL_TREE);
              rank = c->as ? c->as->rank : 0;
              tmp = structure_alloc_comps (c->ts.u.derived, comp, NULL_TREE,
                                           rank, purpose);
              gfc_add_expr_to_block (&fnblock, tmp);
            }

          if (c->attr.allocatable && c->attr.dimension)
            {
              comp = fold_build3 (COMPONENT_REF, ctype,
                                  decl, cdecl, NULL_TREE);
              tmp = gfc_trans_dealloc_allocated (comp);
              gfc_add_expr_to_block (&fnblock, tmp);
            }
          else if (c->attr.allocatable)
            {
              /* Allocatable scalar components.  */
              comp = fold_build3 (COMPONENT_REF, ctype, decl, cdecl, NULL_TREE);

              tmp = gfc_deallocate_with_status (comp, NULL_TREE, true, NULL);
              gfc_add_expr_to_block (&fnblock, tmp);

              tmp = fold_build2 (MODIFY_EXPR, void_type_node, comp,
                                 build_int_cst (TREE_TYPE (comp), 0));
              gfc_add_expr_to_block (&fnblock, tmp);
            }
          else if (c->ts.type == BT_CLASS
                   && c->ts.u.derived->components->attr.allocatable)
            {
              /* Allocatable scalar CLASS components.  */
              comp = fold_build3 (COMPONENT_REF, ctype, decl, cdecl, NULL_TREE);

              /* Add reference to '$data' component.  */
              tmp = c->ts.u.derived->components->backend_decl;
              comp = fold_build3 (COMPONENT_REF, TREE_TYPE (tmp),
                                  comp, tmp, NULL_TREE);

              tmp = gfc_deallocate_with_status (comp, NULL_TREE, true, NULL);
              gfc_add_expr_to_block (&fnblock, tmp);

              tmp = fold_build2 (MODIFY_EXPR, void_type_node, comp,
                                 build_int_cst (TREE_TYPE (comp), 0));
              gfc_add_expr_to_block (&fnblock, tmp);
            }
          break;

        case NULLIFY_ALLOC_COMP:
          if (c->attr.pointer)
            continue;
          else if (c->attr.allocatable && c->attr.dimension)
            {
              comp = fold_build3 (COMPONENT_REF, ctype,
                                  decl, cdecl, NULL_TREE);
              gfc_conv_descriptor_data_set (&fnblock, comp, null_pointer_node);
            }
          else if (c->attr.allocatable)
            {
              /* Allocatable scalar components.  */
              comp = fold_build3 (COMPONENT_REF, ctype, decl, cdecl, NULL_TREE);
              tmp = fold_build2 (MODIFY_EXPR, void_type_node, comp,
                                 build_int_cst (TREE_TYPE (comp), 0));
              gfc_add_expr_to_block (&fnblock, tmp);
            }
          else if (c->ts.type == BT_CLASS
                   && c->ts.u.derived->components->attr.allocatable)
            {
              /* Allocatable scalar CLASS components.  */
              comp = fold_build3 (COMPONENT_REF, ctype, decl, cdecl, NULL_TREE);
              /* Add reference to '$data' component.  */
              tmp = c->ts.u.derived->components->backend_decl;
              comp = fold_build3 (COMPONENT_REF, TREE_TYPE (tmp),
                                  comp, tmp, NULL_TREE);
              tmp = fold_build2 (MODIFY_EXPR, void_type_node, comp,
                                 build_int_cst (TREE_TYPE (comp), 0));
              gfc_add_expr_to_block (&fnblock, tmp);
            }
          else if (cmp_has_alloc_comps)
            {
              comp = fold_build3 (COMPONENT_REF, ctype,
                                  decl, cdecl, NULL_TREE);
              rank = c->as ? c->as->rank : 0;
              tmp = structure_alloc_comps (c->ts.u.derived, comp, NULL_TREE,
                                           rank, purpose);
              gfc_add_expr_to_block (&fnblock, tmp);
            }
          break;

        case COPY_ALLOC_COMP:
          if (c->attr.pointer)
            continue;

          /* We need source and destination components.  */
          comp = fold_build3 (COMPONENT_REF, ctype, decl, cdecl, NULL_TREE);
          dcmp = fold_build3 (COMPONENT_REF, ctype, dest, cdecl, NULL_TREE);
          dcmp = fold_convert (TREE_TYPE (comp), dcmp);

          if (c->attr.allocatable && !cmp_has_alloc_comps)
            {
              rank = c->as ? c->as->rank : 0;
              tmp = gfc_duplicate_allocatable(dcmp, comp, ctype, rank);
              gfc_add_expr_to_block (&fnblock, tmp);
            }

          if (cmp_has_alloc_comps)
            {
              rank = c->as ? c->as->rank : 0;
              tmp = fold_convert (TREE_TYPE (dcmp), comp);
              gfc_add_modify (&fnblock, dcmp, tmp);
              tmp = structure_alloc_comps (c->ts.u.derived, comp, dcmp,
                                           rank, purpose);
              gfc_add_expr_to_block (&fnblock, tmp);
            }
          break;

        default:
          gcc_unreachable ();
          break;
        }
    }

  return gfc_finish_block (&fnblock);
}

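/* For illustration, consider a hypothetical derived type (not taken from
   the testsuite):

        type t
          integer, allocatable :: a(:)
          type(t2), allocatable :: nested(:)   ! assume t2 has allocatable parts
        end type t

   With DEALLOCATE_ALLOC_COMP each allocated component (and, recursively,
   the allocatable components of "nested") is freed; with NULLIFY_ALLOC_COMP
   their data pointers are merely set to NULL; the COPY_* purposes perform a
   deep copy into the corresponding components of DEST.  */
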
/* Recursively traverse an object of derived type, generating code to
   nullify allocatable components.  */

tree
gfc_nullify_alloc_comp (gfc_symbol * der_type, tree decl, int rank)
{
  return structure_alloc_comps (der_type, decl, NULL_TREE, rank,
                                NULLIFY_ALLOC_COMP);
}


/* Recursively traverse an object of derived type, generating code to
   deallocate allocatable components.  */

tree
gfc_deallocate_alloc_comp (gfc_symbol * der_type, tree decl, int rank)
{
  return structure_alloc_comps (der_type, decl, NULL_TREE, rank,
                                DEALLOCATE_ALLOC_COMP);
}


/* Recursively traverse an object of derived type, generating code to
   copy it and its allocatable components.  */

tree
gfc_copy_alloc_comp (gfc_symbol * der_type, tree decl, tree dest, int rank)
{
  return structure_alloc_comps (der_type, decl, dest, rank, COPY_ALLOC_COMP);
}


/* Recursively traverse an object of derived type, generating code to
   copy only its allocatable components.  */

tree
gfc_copy_only_alloc_comp (gfc_symbol * der_type, tree decl, tree dest, int rank)
{
  return structure_alloc_comps (der_type, decl, dest, rank, COPY_ONLY_ALLOC_COMP);
}

/* NULLIFY an allocatable/pointer array on function entry, free it on exit.
   Do likewise, recursively if necessary, with the allocatable components of
   derived types.  */

tree
gfc_trans_deferred_array (gfc_symbol * sym, tree body)
{
  tree type;
  tree tmp;
  tree descriptor;
  stmtblock_t fnblock;
  locus loc;
  int rank;
  bool sym_has_alloc_comp;

  sym_has_alloc_comp = (sym->ts.type == BT_DERIVED)
                        && sym->ts.u.derived->attr.alloc_comp;

  /* Make sure the frontend gets these right.  */
  if (!(sym->attr.pointer || sym->attr.allocatable || sym_has_alloc_comp))
    fatal_error ("Possible frontend bug: Deferred array size without pointer, "
                 "allocatable attribute or derived type without allocatable "
                 "components.");

  gfc_init_block (&fnblock);

  gcc_assert (TREE_CODE (sym->backend_decl) == VAR_DECL
              || TREE_CODE (sym->backend_decl) == PARM_DECL);

  if (sym->ts.type == BT_CHARACTER
      && !INTEGER_CST_P (sym->ts.u.cl->backend_decl))
    {
      gfc_conv_string_length (sym->ts.u.cl, NULL, &fnblock);
      gfc_trans_vla_type_sizes (sym, &fnblock);
    }

  /* Dummy, use-associated and result variables don't need anything special.  */
  if (sym->attr.dummy || sym->attr.use_assoc || sym->attr.result)
    {
      gfc_add_expr_to_block (&fnblock, body);

      return gfc_finish_block (&fnblock);
    }

  gfc_get_backend_locus (&loc);
  gfc_set_backend_locus (&sym->declared_at);
  descriptor = sym->backend_decl;

  /* Although static, derived types with default initializers and
     allocatable components must not be nulled wholesale; instead they
     are treated component by component.  */
  if (TREE_STATIC (descriptor) && !sym_has_alloc_comp)
    {
      /* SAVEd variables are not freed on exit.  */
      gfc_trans_static_array_pointer (sym);
      return body;
    }

  /* Get the descriptor type.  */
  type = TREE_TYPE (sym->backend_decl);

  if (sym_has_alloc_comp && !(sym->attr.pointer || sym->attr.allocatable))
    {
      if (!sym->attr.save)
        {
          rank = sym->as ? sym->as->rank : 0;
          tmp = gfc_nullify_alloc_comp (sym->ts.u.derived, descriptor, rank);
          gfc_add_expr_to_block (&fnblock, tmp);
          if (sym->value)
            {
              tmp = gfc_init_default_dt (sym, NULL);
              gfc_add_expr_to_block (&fnblock, tmp);
            }
        }
    }
  else if (!GFC_DESCRIPTOR_TYPE_P (type))
    {
      /* If the backend_decl is not a descriptor, we must have a pointer
         to one.  */
      descriptor = build_fold_indirect_ref_loc (input_location,
                                                sym->backend_decl);
      type = TREE_TYPE (descriptor);
    }

  /* NULLIFY the data pointer.  */
  if (GFC_DESCRIPTOR_TYPE_P (type) && !sym->attr.save)
    gfc_conv_descriptor_data_set (&fnblock, descriptor, null_pointer_node);

  gfc_add_expr_to_block (&fnblock, body);

  gfc_set_backend_locus (&loc);

  /* Allocatable arrays need to be freed when they go out of scope.
     The allocatable components of pointers must not be touched.  */
  if (sym_has_alloc_comp && !(sym->attr.function || sym->attr.result)
      && !sym->attr.pointer && !sym->attr.save)
    {
      int rank;
      rank = sym->as ? sym->as->rank : 0;
      tmp = gfc_deallocate_alloc_comp (sym->ts.u.derived, descriptor, rank);
      gfc_add_expr_to_block (&fnblock, tmp);
    }

  if (sym->attr.allocatable && sym->attr.dimension
      && !sym->attr.save && !sym->attr.result)
    {
      tmp = gfc_trans_dealloc_allocated (sym->backend_decl);
      gfc_add_expr_to_block (&fnblock, tmp);
    }

  return gfc_finish_block (&fnblock);
}

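/* Illustration (hypothetical Fortran, not generated code): for a local
   variable declared as

        real, allocatable :: x(:)

   the wrapper built above nullifies the data pointer of X's descriptor on
   entry and, because X is neither SAVEd nor a dummy, frees any memory still
   allocated to it when the procedure body finishes.  */
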
/************ Expression Walking Functions ******************/

/* Walk a variable reference.

   Possible extension - multiple component subscripts.
    x(:,:) = foo%a(:)%b(:)
   Transforms to
    forall (i=..., j=...)
      x(i,j) = foo%a(j)%b(i)
    end forall
   This adds a fair amount of complexity because you need to deal with more
   than one ref.  Maybe handle in a similar manner to vector subscripts.
   Maybe not worth the effort.  */


static gfc_ss *
gfc_walk_variable_expr (gfc_ss * ss, gfc_expr * expr)
{
  gfc_ref *ref;
  gfc_array_ref *ar;
  gfc_ss *newss;
  int n;

  for (ref = expr->ref; ref; ref = ref->next)
    if (ref->type == REF_ARRAY && ref->u.ar.type != AR_ELEMENT)
      break;

  for (; ref; ref = ref->next)
    {
      if (ref->type == REF_SUBSTRING)
        {
          newss = gfc_get_ss ();
          newss->type = GFC_SS_SCALAR;
          newss->expr = ref->u.ss.start;
          newss->next = ss;
          ss = newss;

          newss = gfc_get_ss ();
          newss->type = GFC_SS_SCALAR;
          newss->expr = ref->u.ss.end;
          newss->next = ss;
          ss = newss;
        }

      /* We're only interested in array sections from now on.  */
      if (ref->type != REF_ARRAY)
        continue;

      ar = &ref->u.ar;
      switch (ar->type)
        {
        case AR_ELEMENT:
          for (n = 0; n < ar->dimen; n++)
            {
              newss = gfc_get_ss ();
              newss->type = GFC_SS_SCALAR;
              newss->expr = ar->start[n];
              newss->next = ss;
              ss = newss;
            }
          break;

        case AR_FULL:
          newss = gfc_get_ss ();
          newss->type = GFC_SS_SECTION;
          newss->expr = expr;
          newss->next = ss;
          newss->data.info.dimen = ar->as->rank;
          newss->data.info.ref = ref;

          /* Make sure array is the same as array(:,:), this way
             we don't need to special case all the time.  */
          ar->dimen = ar->as->rank;
          for (n = 0; n < ar->dimen; n++)
            {
              newss->data.info.dim[n] = n;
              ar->dimen_type[n] = DIMEN_RANGE;

              gcc_assert (ar->start[n] == NULL);
              gcc_assert (ar->end[n] == NULL);
              gcc_assert (ar->stride[n] == NULL);
            }
          ss = newss;
          break;

        case AR_SECTION:
          newss = gfc_get_ss ();
          newss->type = GFC_SS_SECTION;
          newss->expr = expr;
          newss->next = ss;
          newss->data.info.dimen = 0;
          newss->data.info.ref = ref;

          /* We add SS chains for all the subscripts in the section.  */
          for (n = 0; n < ar->dimen; n++)
            {
              gfc_ss *indexss;

              switch (ar->dimen_type[n])
                {
                case DIMEN_ELEMENT:
                  /* Add SS for elemental (scalar) subscripts.  */
                  gcc_assert (ar->start[n]);
                  indexss = gfc_get_ss ();
                  indexss->type = GFC_SS_SCALAR;
                  indexss->expr = ar->start[n];
                  indexss->next = gfc_ss_terminator;
                  indexss->loop_chain = gfc_ss_terminator;
                  newss->data.info.subscript[n] = indexss;
                  break;

                case DIMEN_RANGE:
                  /* We don't add anything for sections, just remember this
                     dimension for later.  */
                  newss->data.info.dim[newss->data.info.dimen] = n;
                  newss->data.info.dimen++;
                  break;

                case DIMEN_VECTOR:
                  /* Create a GFC_SS_VECTOR index in which we can store
                     the vector's descriptor.  */
                  indexss = gfc_get_ss ();
                  indexss->type = GFC_SS_VECTOR;
                  indexss->expr = ar->start[n];
                  indexss->next = gfc_ss_terminator;
                  indexss->loop_chain = gfc_ss_terminator;
                  newss->data.info.subscript[n] = indexss;
                  newss->data.info.dim[newss->data.info.dimen] = n;
                  newss->data.info.dimen++;
                  break;

                default:
                  /* We should know what sort of section it is by now.  */
                  gcc_unreachable ();
                }
            }
          /* We should have at least one non-elemental dimension.  */
          gcc_assert (newss->data.info.dimen > 0);
          ss = newss;
          break;

        default:
          /* We should know what sort of section it is by now.  */
          gcc_unreachable ();
        }

    }
  return ss;
}

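/* Hypothetical example (not from the sources): for a reference such as
   a(3, 2:n, v) the AR_SECTION case above records dimension 0 as a
   DIMEN_ELEMENT subscript (a scalar SS), dimension 1 as a DIMEN_RANGE
   (only its index is remembered in data.info.dim), and dimension 2 as a
   DIMEN_VECTOR whose subscript gets a GFC_SS_VECTOR entry, leaving
   data.info.dimen == 2 for the scalarizer.  */
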
/* Walk an expression operator.  If only one operand of a binary expression is
   scalar, we must also add the scalar term to the SS chain.  */

static gfc_ss *
gfc_walk_op_expr (gfc_ss * ss, gfc_expr * expr)
{
  gfc_ss *head;
  gfc_ss *head2;
  gfc_ss *newss;

  head = gfc_walk_subexpr (ss, expr->value.op.op1);
  if (expr->value.op.op2 == NULL)
    head2 = head;
  else
    head2 = gfc_walk_subexpr (head, expr->value.op.op2);

  /* All operands are scalar.  Pass back and let the caller deal with it.  */
  if (head2 == ss)
    return head2;

  /* All operands require scalarization.  */
  if (head != ss && (expr->value.op.op2 == NULL || head2 != head))
    return head2;

  /* One of the operands needs scalarization, the other is scalar.
     Create a gfc_ss for the scalar expression.  */
  newss = gfc_get_ss ();
  newss->type = GFC_SS_SCALAR;
  if (head == ss)
    {
      /* First operand is scalar.  We build the chain in reverse order, so
         add the scalar SS after the second operand.  */
      head = head2;
      while (head && head->next != ss)
        head = head->next;
      /* Check we haven't somehow broken the chain.  */
      gcc_assert (head);
      newss->next = ss;
      head->next = newss;
      newss->expr = expr->value.op.op1;
    }
  else /* head2 == head */
    {
      gcc_assert (head2 == head);
      /* Second operand is scalar.  */
      newss->next = head2;
      head2 = newss;
      newss->expr = expr->value.op.op2;
    }

  return head2;
}

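/* For instance (illustrative only), walking b * 2.0 where B is an array and
   2.0 is scalar takes the "second operand is scalar" branch above: a
   GFC_SS_SCALAR entry for the constant is added to the chain alongside B's
   section SS, so the scalarizer can evaluate the constant once rather than
   per iteration.  */
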
/* Reverse a SS chain.  */

gfc_ss *
gfc_reverse_ss (gfc_ss * ss)
{
  gfc_ss *next;
  gfc_ss *head;

  gcc_assert (ss != NULL);

  head = gfc_ss_terminator;
  while (ss != gfc_ss_terminator)
    {
      next = ss->next;
      /* Check we didn't somehow break the chain.  */
      gcc_assert (next != NULL);
      ss->next = head;
      head = ss;
      ss = next;
    }

  return (head);
}

/* Walk the arguments of an elemental function.  */

gfc_ss *
gfc_walk_elemental_function_args (gfc_ss * ss, gfc_actual_arglist *arg,
                                  gfc_ss_type type)
{
  int scalar;
  gfc_ss *head;
  gfc_ss *tail;
  gfc_ss *newss;

  head = gfc_ss_terminator;
  tail = NULL;
  scalar = 1;
  for (; arg; arg = arg->next)
    {
      if (!arg->expr)
        continue;

      newss = gfc_walk_subexpr (head, arg->expr);
      if (newss == head)
        {
          /* Scalar argument.  */
          newss = gfc_get_ss ();
          newss->type = type;
          newss->expr = arg->expr;
          newss->next = head;
        }
      else
        scalar = 0;

      head = newss;
      if (!tail)
        {
          tail = head;
          while (tail->next != gfc_ss_terminator)
            tail = tail->next;
        }
    }

  if (scalar)
    {
      /* If all the arguments are scalar we don't need the argument SS.  */
      gfc_free_ss_chain (head);
      /* Pass it back.  */
      return ss;
    }

  /* Add it onto the existing chain.  */
  tail->next = ss;
  return head;
}

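/* Hypothetical example: for a call f(a, 0) to an elemental function F with
   array argument A, the loop above yields one SS entry per argument - the
   section SS produced by walking A plus a scalar SS of the requested TYPE
   for the constant - so every argument can be indexed inside a single
   scalarization loop.  */
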
/* Walk a function call.  Scalar functions are passed back, and taken out of
   scalarization loops.  For elemental functions we walk their arguments.
   The result of functions returning arrays is stored in a temporary outside
   the loop, so that the function is only called once.  Hence we do not need
   to walk their arguments.  */

static gfc_ss *
gfc_walk_function_expr (gfc_ss * ss, gfc_expr * expr)
{
  gfc_ss *newss;
  gfc_intrinsic_sym *isym;
  gfc_symbol *sym;
  gfc_component *comp = NULL;

  isym = expr->value.function.isym;

  /* Handle intrinsic functions separately.  */
  if (isym)
    return gfc_walk_intrinsic_function (ss, expr, isym);

  sym = expr->value.function.esym;
  if (!sym)
    sym = expr->symtree->n.sym;

  /* A function that returns arrays.  */
  gfc_is_proc_ptr_comp (expr, &comp);
  if ((!comp && gfc_return_by_reference (sym) && sym->result->attr.dimension)
      || (comp && comp->attr.dimension))
    {
      newss = gfc_get_ss ();
      newss->type = GFC_SS_FUNCTION;
      newss->expr = expr;
      newss->next = ss;
      newss->data.info.dimen = expr->rank;
      return newss;
    }

  /* Walk the parameters of an elemental function.  For now we always pass
     by reference.  */
  if (sym->attr.elemental)
    return gfc_walk_elemental_function_args (ss, expr->value.function.actual,
                                             GFC_SS_REFERENCE);

  /* Scalar functions are OK as these are evaluated outside the scalarization
     loop.  Pass back and let the caller deal with it.  */
  return ss;
}

/* An array temporary is constructed for array constructors.  */

static gfc_ss *
gfc_walk_array_constructor (gfc_ss * ss, gfc_expr * expr)
{
  gfc_ss *newss;
  int n;

  newss = gfc_get_ss ();
  newss->type = GFC_SS_CONSTRUCTOR;
  newss->expr = expr;
  newss->next = ss;
  newss->data.info.dimen = expr->rank;
  for (n = 0; n < expr->rank; n++)
    newss->data.info.dim[n] = n;

  return newss;
}

/* Walk an expression.  Add walked expressions to the head of the SS chain.
   A wholly scalar expression will not be added.  */

static gfc_ss *
gfc_walk_subexpr (gfc_ss * ss, gfc_expr * expr)
{
  gfc_ss *head;

  switch (expr->expr_type)
    {
    case EXPR_VARIABLE:
      head = gfc_walk_variable_expr (ss, expr);
      return head;

    case EXPR_OP:
      head = gfc_walk_op_expr (ss, expr);
      return head;

    case EXPR_FUNCTION:
      head = gfc_walk_function_expr (ss, expr);
      return head;

    case EXPR_CONSTANT:
    case EXPR_NULL:
    case EXPR_STRUCTURE:
      /* Pass back and let the caller deal with it.  */
      break;

    case EXPR_ARRAY:
      head = gfc_walk_array_constructor (ss, expr);
      return head;

    case EXPR_SUBSTRING:
      /* Pass back and let the caller deal with it.  */
      break;

    default:
      internal_error ("bad expression type during walk (%d)",
                      expr->expr_type);
    }
  return ss;
}

/* Entry point for expression walking.
   A return value equal to the passed chain means this is
   a scalar expression.  It is up to the caller to take whatever action is
   necessary to translate these.  */

gfc_ss *
gfc_walk_expr (gfc_expr * expr)
{
  gfc_ss *res;

  res = gfc_walk_subexpr (gfc_ss_terminator, expr);
  return gfc_reverse_ss (res);
}
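
/* Usage sketch (hypothetical, not code from this file):

        gfc_ss *ss = gfc_walk_expr (expr);
        if (ss == gfc_ss_terminator)
          ... translate as a scalar expression ...
        else
          ... the reversed SS chain drives the scalarization loop ...
 */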