/* OpenMP directive translation -- generate GCC trees from gfc_code.
   Copyright (C) 2005, 2006, 2007, 2008, 2009 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "gimple.h"
#include "ggc.h"
#include "toplev.h"
#include "real.h"
#include "gfortran.h"
#include "trans.h"
#include "trans-stmt.h"
#include "trans-types.h"
#include "trans-array.h"
#include "trans-const.h"
#include "arith.h"

int ompws_flags;

/* True if OpenMP should privatize what this DECL points to rather
   than the DECL itself.  */

bool
gfc_omp_privatize_by_reference (const_tree decl)
{
  tree type = TREE_TYPE (decl);

  if (TREE_CODE (type) == REFERENCE_TYPE
      && (!DECL_ARTIFICIAL (decl) || TREE_CODE (decl) == PARM_DECL))
    return true;

  if (TREE_CODE (type) == POINTER_TYPE)
    {
      /* Array POINTER/ALLOCATABLE have aggregate types, all user variables
         that have POINTER_TYPE type and don't have GFC_POINTER_TYPE_P
         set are supposed to be privatized by reference.  */
      if (GFC_POINTER_TYPE_P (type))
        return false;

      if (!DECL_ARTIFICIAL (decl)
          && TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE)
        return true;

      /* Some arrays are expanded as DECL_ARTIFICIAL pointers
         by the frontend.  */
      if (DECL_LANG_SPECIFIC (decl)
          && GFC_DECL_SAVED_DESCRIPTOR (decl))
        return true;
    }

  return false;
}

/* True if OpenMP sharing attribute of DECL is predetermined.  */

enum omp_clause_default_kind
gfc_omp_predetermined_sharing (tree decl)
{
  if (DECL_ARTIFICIAL (decl)
      && ! GFC_DECL_RESULT (decl)
      && ! (DECL_LANG_SPECIFIC (decl)
            && GFC_DECL_SAVED_DESCRIPTOR (decl)))
    return OMP_CLAUSE_DEFAULT_SHARED;

  /* Cray pointees shouldn't be listed in any clauses and should be
     gimplified to dereference of the corresponding Cray pointer.
     Make them all private, so that they are emitted in the debug
     information.  */
  if (GFC_DECL_CRAY_POINTEE (decl))
    return OMP_CLAUSE_DEFAULT_PRIVATE;

  /* Assumed-size arrays are predetermined to inherit sharing
     attributes of the associated actual argument, which is shared
     for all we care.  */
  if (TREE_CODE (decl) == PARM_DECL
      && GFC_ARRAY_TYPE_P (TREE_TYPE (decl))
      && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl)) == GFC_ARRAY_UNKNOWN
      && GFC_TYPE_ARRAY_UBOUND (TREE_TYPE (decl),
                                GFC_TYPE_ARRAY_RANK (TREE_TYPE (decl)) - 1)
         == NULL)
    return OMP_CLAUSE_DEFAULT_SHARED;

  /* Dummy procedures aren't considered variables by OpenMP, thus are
     disallowed in OpenMP clauses.  They are represented as PARM_DECLs
     in the middle-end, so return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE here
     to avoid complaining about their uses with default(none).  */
  if (TREE_CODE (decl) == PARM_DECL
      && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == FUNCTION_TYPE)
    return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;

  /* COMMON and EQUIVALENCE decls are shared.  They
     are only referenced through DECL_VALUE_EXPR of the variables
     contained in them.  If those are privatized, they will not be
     gimplified to the COMMON or EQUIVALENCE decls.  */
  if (GFC_DECL_COMMON_OR_EQUIV (decl) && ! DECL_HAS_VALUE_EXPR_P (decl))
    return OMP_CLAUSE_DEFAULT_SHARED;

  if (GFC_DECL_RESULT (decl) && ! DECL_HAS_VALUE_EXPR_P (decl))
    return OMP_CLAUSE_DEFAULT_SHARED;

  return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
}

/* Return decl that should be used when reporting DEFAULT(NONE)
   diagnostics.  */

tree
gfc_omp_report_decl (tree decl)
{
  if (DECL_ARTIFICIAL (decl)
      && DECL_LANG_SPECIFIC (decl)
      && GFC_DECL_SAVED_DESCRIPTOR (decl))
    return GFC_DECL_SAVED_DESCRIPTOR (decl);

  return decl;
}

/* Return true if DECL in private clause needs
   OMP_CLAUSE_PRIVATE_OUTER_REF on the private clause.  */
bool
gfc_omp_private_outer_ref (tree decl)
{
  tree type = TREE_TYPE (decl);

  if (GFC_DESCRIPTOR_TYPE_P (type)
      && GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ALLOCATABLE)
    return true;

  return false;
}

/* Return code to initialize DECL with its default constructor, or
   NULL if there's nothing to do.  */

tree
gfc_omp_clause_default_ctor (tree clause, tree decl, tree outer)
{
  tree type = TREE_TYPE (decl), rank, size, esize, ptr, cond, then_b, else_b;
  stmtblock_t block, cond_block;

  if (! GFC_DESCRIPTOR_TYPE_P (type)
      || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
    return NULL;

  gcc_assert (outer != NULL);
  gcc_assert (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_PRIVATE
              || OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_LASTPRIVATE);

  /* Allocatable arrays in PRIVATE clauses need to be set to
     "not currently allocated" allocation status if outer
     array is "not currently allocated", otherwise should be allocated.  */
  gfc_start_block (&block);

  gfc_init_block (&cond_block);

  gfc_add_modify (&cond_block, decl, outer);
  rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
  size = gfc_conv_descriptor_ubound_get (decl, rank);
  size = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
                      gfc_conv_descriptor_lbound_get (decl, rank));
  size = fold_build2 (PLUS_EXPR, gfc_array_index_type, size,
                      gfc_index_one_node);
  if (GFC_TYPE_ARRAY_RANK (type) > 1)
    size = fold_build2 (MULT_EXPR, gfc_array_index_type, size,
                        gfc_conv_descriptor_stride_get (decl, rank));
  esize = fold_convert (gfc_array_index_type,
                        TYPE_SIZE_UNIT (gfc_get_element_type (type)));
  size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, esize);
  size = gfc_evaluate_now (fold_convert (size_type_node, size), &cond_block);
  ptr = gfc_allocate_array_with_status (&cond_block,
                                        build_int_cst (pvoid_type_node, 0),
                                        size, NULL, NULL);
  gfc_conv_descriptor_data_set (&cond_block, decl, ptr);
  then_b = gfc_finish_block (&cond_block);

  gfc_init_block (&cond_block);
  gfc_conv_descriptor_data_set (&cond_block, decl, null_pointer_node);
  else_b = gfc_finish_block (&cond_block);

  cond = fold_build2 (NE_EXPR, boolean_type_node,
                      fold_convert (pvoid_type_node,
                                    gfc_conv_descriptor_data_get (outer)),
                      null_pointer_node);
  gfc_add_expr_to_block (&block, build3 (COND_EXPR, void_type_node,
                                         cond, then_b, else_b));

  return gfc_finish_block (&block);
}

/* Build and return code for a copy constructor from SRC to DEST.  */

tree
gfc_omp_clause_copy_ctor (tree clause, tree dest, tree src)
{
  tree type = TREE_TYPE (dest), ptr, size, esize, rank, call;
  stmtblock_t block;

  if (! GFC_DESCRIPTOR_TYPE_P (type)
      || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
    return build2_v (MODIFY_EXPR, dest, src);

  gcc_assert (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_FIRSTPRIVATE);

  /* Allocatable arrays in FIRSTPRIVATE clauses need to be allocated
     and copied from SRC.  */
  gfc_start_block (&block);

  gfc_add_modify (&block, dest, src);
  rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
  size = gfc_conv_descriptor_ubound_get (dest, rank);
  size = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
                      gfc_conv_descriptor_lbound_get (dest, rank));
  size = fold_build2 (PLUS_EXPR, gfc_array_index_type, size,
                      gfc_index_one_node);
  if (GFC_TYPE_ARRAY_RANK (type) > 1)
    size = fold_build2 (MULT_EXPR, gfc_array_index_type, size,
                        gfc_conv_descriptor_stride_get (dest, rank));
  esize = fold_convert (gfc_array_index_type,
                        TYPE_SIZE_UNIT (gfc_get_element_type (type)));
  size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, esize);
  size = gfc_evaluate_now (fold_convert (size_type_node, size), &block);
  ptr = gfc_allocate_array_with_status (&block,
                                        build_int_cst (pvoid_type_node, 0),
                                        size, NULL, NULL);
  gfc_conv_descriptor_data_set (&block, dest, ptr);
  call = build_call_expr_loc (input_location,
                              built_in_decls[BUILT_IN_MEMCPY], 3, ptr,
                              fold_convert (pvoid_type_node,
                                            gfc_conv_descriptor_data_get (src)),
                              size);
  gfc_add_expr_to_block (&block, fold_convert (void_type_node, call));

  return gfc_finish_block (&block);
}

/* Similarly, except use an assignment operator instead.  */

tree
gfc_omp_clause_assign_op (tree clause ATTRIBUTE_UNUSED, tree dest, tree src)
{
  tree type = TREE_TYPE (dest), rank, size, esize, call;
  stmtblock_t block;

  if (! GFC_DESCRIPTOR_TYPE_P (type)
      || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
    return build2_v (MODIFY_EXPR, dest, src);

  /* Handle copying allocatable arrays.  */
  gfc_start_block (&block);

  rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
  size = gfc_conv_descriptor_ubound_get (dest, rank);
  size = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
                      gfc_conv_descriptor_lbound_get (dest, rank));
  size = fold_build2 (PLUS_EXPR, gfc_array_index_type, size,
                      gfc_index_one_node);
  if (GFC_TYPE_ARRAY_RANK (type) > 1)
    size = fold_build2 (MULT_EXPR, gfc_array_index_type, size,
                        gfc_conv_descriptor_stride_get (dest, rank));
  esize = fold_convert (gfc_array_index_type,
                        TYPE_SIZE_UNIT (gfc_get_element_type (type)));
  size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, esize);
  size = gfc_evaluate_now (fold_convert (size_type_node, size), &block);
  call = build_call_expr_loc (input_location,
                              built_in_decls[BUILT_IN_MEMCPY], 3,
                              fold_convert (pvoid_type_node,
                                            gfc_conv_descriptor_data_get (dest)),
                              fold_convert (pvoid_type_node,
                                            gfc_conv_descriptor_data_get (src)),
                              size);
  gfc_add_expr_to_block (&block, fold_convert (void_type_node, call));

  return gfc_finish_block (&block);
}

/* Build and return code destructing DECL.  Return NULL if nothing
   to be done.  */

tree
gfc_omp_clause_dtor (tree clause ATTRIBUTE_UNUSED, tree decl)
{
  tree type = TREE_TYPE (decl);

  if (! GFC_DESCRIPTOR_TYPE_P (type)
      || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
    return NULL;

  /* Allocatable arrays in FIRSTPRIVATE/LASTPRIVATE etc. clauses need
     to be deallocated if they were allocated.  */
  return gfc_trans_dealloc_allocated (decl);
}


/* Return true if DECL's DECL_VALUE_EXPR (if any) should be
   disregarded in OpenMP construct, because it is going to be
   remapped during OpenMP lowering.  SHARED is true if DECL
   is going to be shared, false if it is going to be privatized.  */

bool
gfc_omp_disregard_value_expr (tree decl, bool shared)
{
  if (GFC_DECL_COMMON_OR_EQUIV (decl)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree value = DECL_VALUE_EXPR (decl);

      if (TREE_CODE (value) == COMPONENT_REF
          && TREE_CODE (TREE_OPERAND (value, 0)) == VAR_DECL
          && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value, 0)))
        {
          /* If variable in COMMON or EQUIVALENCE is privatized, return
             true, as just that variable is supposed to be privatized,
             not the whole COMMON or whole EQUIVALENCE.
             For shared variables in COMMON or EQUIVALENCE, let them be
             gimplified to DECL_VALUE_EXPR, so that for multiple shared vars
             from the same COMMON or EQUIVALENCE just one sharing of the
             whole COMMON or EQUIVALENCE is enough.  */
          return ! shared;
        }
    }

  if (GFC_DECL_RESULT (decl) && DECL_HAS_VALUE_EXPR_P (decl))
    return ! shared;

  return false;
}

/* Return true if DECL that is shared iff SHARED is true should
   be put into OMP_CLAUSE_PRIVATE with OMP_CLAUSE_PRIVATE_DEBUG
   flag set.  */

bool
gfc_omp_private_debug_clause (tree decl, bool shared)
{
  if (GFC_DECL_CRAY_POINTEE (decl))
    return true;

  if (GFC_DECL_COMMON_OR_EQUIV (decl)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree value = DECL_VALUE_EXPR (decl);

      if (TREE_CODE (value) == COMPONENT_REF
          && TREE_CODE (TREE_OPERAND (value, 0)) == VAR_DECL
          && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value, 0)))
        return shared;
    }

  return false;
}

/* Register language specific type size variables as potentially OpenMP
   firstprivate variables.  */

void
gfc_omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
{
  if (GFC_ARRAY_TYPE_P (type) || GFC_DESCRIPTOR_TYPE_P (type))
    {
      int r;

      gcc_assert (TYPE_LANG_SPECIFIC (type) != NULL);
      for (r = 0; r < GFC_TYPE_ARRAY_RANK (type); r++)
        {
          omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_LBOUND (type, r));
          omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_UBOUND (type, r));
          omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_STRIDE (type, r));
        }
      omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_SIZE (type));
      omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_OFFSET (type));
    }
}

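/* Chain the OpenMP clause NODE onto the existing clause list TAIL and
   return the new head of the list.  */
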
static inline tree
gfc_trans_add_clause (tree node, tree tail)
{
  OMP_CLAUSE_CHAIN (node) = tail;
  return node;
}

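/* Return the backend decl that should represent symbol SYM in an OpenMP
   clause.  For function results and alternate ENTRY results this is the
   fake result decl rather than the function decl itself.  */
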
static tree
gfc_trans_omp_variable (gfc_symbol *sym)
{
  tree t = gfc_get_symbol_decl (sym);
  tree parent_decl;
  int parent_flag;
  bool return_value;
  bool alternate_entry;
  bool entry_master;

  return_value = sym->attr.function && sym->result == sym;
  alternate_entry = sym->attr.function && sym->attr.entry
                    && sym->result == sym;
  entry_master = sym->attr.result
                 && sym->ns->proc_name->attr.entry_master
                 && !gfc_return_by_reference (sym->ns->proc_name);
  parent_decl = DECL_CONTEXT (current_function_decl);

  if ((t == parent_decl && return_value)
      || (sym->ns && sym->ns->proc_name
          && sym->ns->proc_name->backend_decl == parent_decl
          && (alternate_entry || entry_master)))
    parent_flag = 1;
  else
    parent_flag = 0;

  /* Special case for assigning the return value of a function.
     Self recursive functions must have an explicit return value.  */
  if (return_value && (t == current_function_decl || parent_flag))
    t = gfc_get_fake_result_decl (sym, parent_flag);

  /* Similarly for alternate entry points.  */
  else if (alternate_entry
           && (sym->ns->proc_name->backend_decl == current_function_decl
               || parent_flag))
    {
      gfc_entry_list *el = NULL;

      for (el = sym->ns->entries; el; el = el->next)
        if (sym == el->sym)
          {
            t = gfc_get_fake_result_decl (sym, parent_flag);
            break;
          }
    }

  else if (entry_master
           && (sym->ns->proc_name->backend_decl == current_function_decl
               || parent_flag))
    t = gfc_get_fake_result_decl (sym, parent_flag);

  return t;
}

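/* Build an OpenMP clause of kind CODE for every referenced symbol in
   NAMELIST and chain the new clauses onto LIST, returning the result.  */
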
static tree
gfc_trans_omp_variable_list (enum omp_clause_code code, gfc_namelist *namelist,
                             tree list)
{
  for (; namelist != NULL; namelist = namelist->next)
    if (namelist->sym->attr.referenced)
      {
        tree t = gfc_trans_omp_variable (namelist->sym);
        if (t != error_mark_node)
          {
            tree node = build_omp_clause (input_location, code);
            OMP_CLAUSE_DECL (node) = t;
            list = gfc_trans_add_clause (node, list);
          }
      }
  return list;
}

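/* Fill in the REDUCTION_INIT, REDUCTION_MERGE and REDUCTION_PLACEHOLDER
   parts of the array reduction clause C for symbol SYM, building the
   initialization and merge assignments from fake symbols and resolving
   them as Fortran expressions.  */
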
static void
gfc_trans_omp_array_reduction (tree c, gfc_symbol *sym, locus where)
{
  gfc_symtree *root1 = NULL, *root2 = NULL, *root3 = NULL, *root4 = NULL;
  gfc_symtree *symtree1, *symtree2, *symtree3, *symtree4 = NULL;
  gfc_symbol init_val_sym, outer_sym, intrinsic_sym;
  gfc_expr *e1, *e2, *e3, *e4;
  gfc_ref *ref;
  tree decl, backend_decl, stmt;
  locus old_loc = gfc_current_locus;
  const char *iname;
  gfc_try t;

  decl = OMP_CLAUSE_DECL (c);
  gfc_current_locus = where;

  /* Create a fake symbol for init value.  */
  memset (&init_val_sym, 0, sizeof (init_val_sym));
  init_val_sym.ns = sym->ns;
  init_val_sym.name = sym->name;
  init_val_sym.ts = sym->ts;
  init_val_sym.attr.referenced = 1;
  init_val_sym.declared_at = where;
  init_val_sym.attr.flavor = FL_VARIABLE;
  backend_decl = omp_reduction_init (c, gfc_sym_type (&init_val_sym));
  init_val_sym.backend_decl = backend_decl;

  /* Create a fake symbol for the outer array reference.  */
  outer_sym = *sym;
  outer_sym.as = gfc_copy_array_spec (sym->as);
  outer_sym.attr.dummy = 0;
  outer_sym.attr.result = 0;
  outer_sym.attr.flavor = FL_VARIABLE;
  outer_sym.backend_decl = create_tmp_var_raw (TREE_TYPE (decl), NULL);

  /* Create fake symtrees for it.  */
  symtree1 = gfc_new_symtree (&root1, sym->name);
  symtree1->n.sym = sym;
  gcc_assert (symtree1 == root1);

  symtree2 = gfc_new_symtree (&root2, sym->name);
  symtree2->n.sym = &init_val_sym;
  gcc_assert (symtree2 == root2);

  symtree3 = gfc_new_symtree (&root3, sym->name);
  symtree3->n.sym = &outer_sym;
  gcc_assert (symtree3 == root3);

  /* Create expressions.  */
  e1 = gfc_get_expr ();
  e1->expr_type = EXPR_VARIABLE;
  e1->where = where;
  e1->symtree = symtree1;
  e1->ts = sym->ts;
  e1->ref = ref = gfc_get_ref ();
  ref->type = REF_ARRAY;
  ref->u.ar.where = where;
  ref->u.ar.as = sym->as;
  ref->u.ar.type = AR_FULL;
  ref->u.ar.dimen = 0;
  t = gfc_resolve_expr (e1);
  gcc_assert (t == SUCCESS);

  e2 = gfc_get_expr ();
  e2->expr_type = EXPR_VARIABLE;
  e2->where = where;
  e2->symtree = symtree2;
  e2->ts = sym->ts;
  t = gfc_resolve_expr (e2);
  gcc_assert (t == SUCCESS);

  e3 = gfc_copy_expr (e1);
  e3->symtree = symtree3;
  t = gfc_resolve_expr (e3);
  gcc_assert (t == SUCCESS);

  iname = NULL;
  switch (OMP_CLAUSE_REDUCTION_CODE (c))
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
      e4 = gfc_add (e3, e1);
      break;
    case MULT_EXPR:
      e4 = gfc_multiply (e3, e1);
      break;
    case TRUTH_ANDIF_EXPR:
      e4 = gfc_and (e3, e1);
      break;
    case TRUTH_ORIF_EXPR:
      e4 = gfc_or (e3, e1);
      break;
    case EQ_EXPR:
      e4 = gfc_eqv (e3, e1);
      break;
    case NE_EXPR:
      e4 = gfc_neqv (e3, e1);
      break;
    case MIN_EXPR:
      iname = "min";
      break;
    case MAX_EXPR:
      iname = "max";
      break;
    case BIT_AND_EXPR:
      iname = "iand";
      break;
    case BIT_IOR_EXPR:
      iname = "ior";
      break;
    case BIT_XOR_EXPR:
      iname = "ieor";
      break;
    default:
      gcc_unreachable ();
    }
  if (iname != NULL)
    {
      memset (&intrinsic_sym, 0, sizeof (intrinsic_sym));
      intrinsic_sym.ns = sym->ns;
      intrinsic_sym.name = iname;
      intrinsic_sym.ts = sym->ts;
      intrinsic_sym.attr.referenced = 1;
      intrinsic_sym.attr.intrinsic = 1;
      intrinsic_sym.attr.function = 1;
      intrinsic_sym.result = &intrinsic_sym;
      intrinsic_sym.declared_at = where;

      symtree4 = gfc_new_symtree (&root4, iname);
      symtree4->n.sym = &intrinsic_sym;
      gcc_assert (symtree4 == root4);

      e4 = gfc_get_expr ();
      e4->expr_type = EXPR_FUNCTION;
      e4->where = where;
      e4->symtree = symtree4;
      e4->value.function.isym = gfc_find_function (iname);
      e4->value.function.actual = gfc_get_actual_arglist ();
      e4->value.function.actual->expr = e3;
      e4->value.function.actual->next = gfc_get_actual_arglist ();
      e4->value.function.actual->next->expr = e1;
    }
  /* e1 and e3 have been stored as arguments of e4, avoid sharing.  */
  e1 = gfc_copy_expr (e1);
  e3 = gfc_copy_expr (e3);
  t = gfc_resolve_expr (e4);
  gcc_assert (t == SUCCESS);

  /* Create the init statement list.  */
  pushlevel (0);
  if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl))
      && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl)) == GFC_ARRAY_ALLOCATABLE)
    {
      /* If decl is an allocatable array, it needs to be allocated
         with the same bounds as the outer var.  */
      tree type = TREE_TYPE (decl), rank, size, esize, ptr;
      stmtblock_t block;

      gfc_start_block (&block);

      gfc_add_modify (&block, decl, outer_sym.backend_decl);
      rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
      size = gfc_conv_descriptor_ubound_get (decl, rank);
      size = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
                          gfc_conv_descriptor_lbound_get (decl, rank));
      size = fold_build2 (PLUS_EXPR, gfc_array_index_type, size,
                          gfc_index_one_node);
      if (GFC_TYPE_ARRAY_RANK (type) > 1)
        size = fold_build2 (MULT_EXPR, gfc_array_index_type, size,
                            gfc_conv_descriptor_stride_get (decl, rank));
      esize = fold_convert (gfc_array_index_type,
                            TYPE_SIZE_UNIT (gfc_get_element_type (type)));
      size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, esize);
      size = gfc_evaluate_now (fold_convert (size_type_node, size), &block);
      ptr = gfc_allocate_array_with_status (&block,
                                            build_int_cst (pvoid_type_node, 0),
                                            size, NULL, NULL);
      gfc_conv_descriptor_data_set (&block, decl, ptr);
      gfc_add_expr_to_block (&block, gfc_trans_assignment (e1, e2, false));
      stmt = gfc_finish_block (&block);
    }
  else
    stmt = gfc_trans_assignment (e1, e2, false);
  if (TREE_CODE (stmt) != BIND_EXPR)
    stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0, 0));
  else
    poplevel (0, 0, 0);
  OMP_CLAUSE_REDUCTION_INIT (c) = stmt;

  /* Create the merge statement list.  */
  pushlevel (0);
  if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl))
      && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl)) == GFC_ARRAY_ALLOCATABLE)
    {
      /* If decl is an allocatable array, it needs to be deallocated
         afterwards.  */
      stmtblock_t block;

      gfc_start_block (&block);
      gfc_add_expr_to_block (&block, gfc_trans_assignment (e3, e4, false));
      gfc_add_expr_to_block (&block, gfc_trans_dealloc_allocated (decl));
      stmt = gfc_finish_block (&block);
    }
  else
    stmt = gfc_trans_assignment (e3, e4, false);
  if (TREE_CODE (stmt) != BIND_EXPR)
    stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0, 0));
  else
    poplevel (0, 0, 0);
  OMP_CLAUSE_REDUCTION_MERGE (c) = stmt;

  /* And stick the placeholder VAR_DECL into the clause as well.  */
  OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = outer_sym.backend_decl;

  gfc_current_locus = old_loc;

  gfc_free_expr (e1);
  gfc_free_expr (e2);
  gfc_free_expr (e3);
  gfc_free_expr (e4);
  gfc_free (symtree1);
  gfc_free (symtree2);
  gfc_free (symtree3);
  if (symtree4)
    gfc_free (symtree4);
  gfc_free_array_spec (outer_sym.as);
}

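/* Build an OMP_CLAUSE_REDUCTION clause with reduction code REDUCTION_CODE
   for every referenced symbol in NAMELIST, handling array reductions
   specially, and chain the clauses onto LIST.  */
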
static tree
gfc_trans_omp_reduction_list (gfc_namelist *namelist, tree list,
                              enum tree_code reduction_code, locus where)
{
  for (; namelist != NULL; namelist = namelist->next)
    if (namelist->sym->attr.referenced)
      {
        tree t = gfc_trans_omp_variable (namelist->sym);
        if (t != error_mark_node)
          {
            tree node = build_omp_clause (where.lb->location,
                                          OMP_CLAUSE_REDUCTION);
            OMP_CLAUSE_DECL (node) = t;
            OMP_CLAUSE_REDUCTION_CODE (node) = reduction_code;
            if (namelist->sym->attr.dimension)
              gfc_trans_omp_array_reduction (node, namelist->sym, where);
            list = gfc_trans_add_clause (node, list);
          }
      }
  return list;
}

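/* Translate the frontend gfc_omp_clauses structure CLAUSES into a chain
   of GENERIC OMP_CLAUSE_* trees, evaluating any clause expressions
   (IF, NUM_THREADS, chunk size) into BLOCK.  */
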
static tree
gfc_trans_omp_clauses (stmtblock_t *block, gfc_omp_clauses *clauses,
                       locus where)
{
  tree omp_clauses = NULL_TREE, chunk_size, c;
  int list;
  enum omp_clause_code clause_code;
  gfc_se se;

  if (clauses == NULL)
    return NULL_TREE;

  for (list = 0; list < OMP_LIST_NUM; list++)
    {
      gfc_namelist *n = clauses->lists[list];

      if (n == NULL)
        continue;
      if (list >= OMP_LIST_REDUCTION_FIRST
          && list <= OMP_LIST_REDUCTION_LAST)
        {
          enum tree_code reduction_code;
          switch (list)
            {
            case OMP_LIST_PLUS:
              reduction_code = PLUS_EXPR;
              break;
            case OMP_LIST_MULT:
              reduction_code = MULT_EXPR;
              break;
            case OMP_LIST_SUB:
              reduction_code = MINUS_EXPR;
              break;
            case OMP_LIST_AND:
              reduction_code = TRUTH_ANDIF_EXPR;
              break;
            case OMP_LIST_OR:
              reduction_code = TRUTH_ORIF_EXPR;
              break;
            case OMP_LIST_EQV:
              reduction_code = EQ_EXPR;
              break;
            case OMP_LIST_NEQV:
              reduction_code = NE_EXPR;
              break;
            case OMP_LIST_MAX:
              reduction_code = MAX_EXPR;
              break;
            case OMP_LIST_MIN:
              reduction_code = MIN_EXPR;
              break;
            case OMP_LIST_IAND:
              reduction_code = BIT_AND_EXPR;
              break;
            case OMP_LIST_IOR:
              reduction_code = BIT_IOR_EXPR;
              break;
            case OMP_LIST_IEOR:
              reduction_code = BIT_XOR_EXPR;
              break;
            default:
              gcc_unreachable ();
            }
          omp_clauses
            = gfc_trans_omp_reduction_list (n, omp_clauses, reduction_code,
                                            where);
          continue;
        }
      switch (list)
        {
        case OMP_LIST_PRIVATE:
          clause_code = OMP_CLAUSE_PRIVATE;
          goto add_clause;
        case OMP_LIST_SHARED:
          clause_code = OMP_CLAUSE_SHARED;
          goto add_clause;
        case OMP_LIST_FIRSTPRIVATE:
          clause_code = OMP_CLAUSE_FIRSTPRIVATE;
          goto add_clause;
        case OMP_LIST_LASTPRIVATE:
          clause_code = OMP_CLAUSE_LASTPRIVATE;
          goto add_clause;
        case OMP_LIST_COPYIN:
          clause_code = OMP_CLAUSE_COPYIN;
          goto add_clause;
        case OMP_LIST_COPYPRIVATE:
          clause_code = OMP_CLAUSE_COPYPRIVATE;
          /* FALLTHROUGH */
        add_clause:
          omp_clauses
            = gfc_trans_omp_variable_list (clause_code, n, omp_clauses);
          break;
        default:
          break;
        }
    }

  if (clauses->if_expr)
    {
      tree if_var;

      gfc_init_se (&se, NULL);
      gfc_conv_expr (&se, clauses->if_expr);
      gfc_add_block_to_block (block, &se.pre);
      if_var = gfc_evaluate_now (se.expr, block);
      gfc_add_block_to_block (block, &se.post);

      c = build_omp_clause (where.lb->location, OMP_CLAUSE_IF);
      OMP_CLAUSE_IF_EXPR (c) = if_var;
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }

  if (clauses->num_threads)
    {
      tree num_threads;

      gfc_init_se (&se, NULL);
      gfc_conv_expr (&se, clauses->num_threads);
      gfc_add_block_to_block (block, &se.pre);
      num_threads = gfc_evaluate_now (se.expr, block);
      gfc_add_block_to_block (block, &se.post);

      c = build_omp_clause (where.lb->location, OMP_CLAUSE_NUM_THREADS);
      OMP_CLAUSE_NUM_THREADS_EXPR (c) = num_threads;
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }

  chunk_size = NULL_TREE;
  if (clauses->chunk_size)
    {
      gfc_init_se (&se, NULL);
      gfc_conv_expr (&se, clauses->chunk_size);
      gfc_add_block_to_block (block, &se.pre);
      chunk_size = gfc_evaluate_now (se.expr, block);
      gfc_add_block_to_block (block, &se.post);
    }

  if (clauses->sched_kind != OMP_SCHED_NONE)
    {
      c = build_omp_clause (where.lb->location, OMP_CLAUSE_SCHEDULE);
      OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (c) = chunk_size;
      switch (clauses->sched_kind)
        {
        case OMP_SCHED_STATIC:
          OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_STATIC;
          break;
        case OMP_SCHED_DYNAMIC:
          OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_DYNAMIC;
          break;
        case OMP_SCHED_GUIDED:
          OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_GUIDED;
          break;
        case OMP_SCHED_RUNTIME:
          OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_RUNTIME;
          break;
        case OMP_SCHED_AUTO:
          OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_AUTO;
          break;
        default:
          gcc_unreachable ();
        }
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }

  if (clauses->default_sharing != OMP_DEFAULT_UNKNOWN)
    {
      c = build_omp_clause (where.lb->location, OMP_CLAUSE_DEFAULT);
      switch (clauses->default_sharing)
        {
        case OMP_DEFAULT_NONE:
          OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_NONE;
          break;
        case OMP_DEFAULT_SHARED:
          OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_SHARED;
          break;
        case OMP_DEFAULT_PRIVATE:
          OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_PRIVATE;
          break;
        case OMP_DEFAULT_FIRSTPRIVATE:
          OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
          break;
        default:
          gcc_unreachable ();
        }
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }

  if (clauses->nowait)
    {
      c = build_omp_clause (where.lb->location, OMP_CLAUSE_NOWAIT);
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }

  if (clauses->ordered)
    {
      c = build_omp_clause (where.lb->location, OMP_CLAUSE_ORDERED);
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }

  if (clauses->untied)
    {
      c = build_omp_clause (where.lb->location, OMP_CLAUSE_UNTIED);
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }

  if (clauses->collapse)
    {
      c = build_omp_clause (where.lb->location, OMP_CLAUSE_COLLAPSE);
      OMP_CLAUSE_COLLAPSE_EXPR (c) = build_int_cst (NULL, clauses->collapse);
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }

  return omp_clauses;
}

/* Like gfc_trans_code, but force creation of a BIND_EXPR around it.  */

static tree
gfc_trans_omp_code (gfc_code *code, bool force_empty)
{
  tree stmt;

  pushlevel (0);
  stmt = gfc_trans_code (code);
  if (TREE_CODE (stmt) != BIND_EXPR)
    {
      if (!IS_EMPTY_STMT (stmt) || force_empty)
        {
          tree block = poplevel (1, 0, 0);
          stmt = build3_v (BIND_EXPR, NULL, stmt, block);
        }
      else
        poplevel (0, 0, 0);
    }
  else
    poplevel (0, 0, 0);
  return stmt;
}


static tree gfc_trans_omp_sections (gfc_code *, gfc_omp_clauses *);
static tree gfc_trans_omp_workshare (gfc_code *, gfc_omp_clauses *);

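/* Translate !$OMP ATOMIC.  The assignment inside the construct is turned
   into an OMP_ATOMIC update of the left hand side variable, recognizing
   both operator forms (var = var op expr) and intrinsic forms such as
   MIN/MAX/IAND/IOR/IEOR.  */
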
static tree
gfc_trans_omp_atomic (gfc_code *code)
{
  gfc_se lse;
  gfc_se rse;
  gfc_expr *expr2, *e;
  gfc_symbol *var;
  stmtblock_t block;
  tree lhsaddr, type, rhs, x;
  enum tree_code op = ERROR_MARK;
  bool var_on_left = false;

  code = code->block->next;
  gcc_assert (code->op == EXEC_ASSIGN);
  gcc_assert (code->next == NULL);
  var = code->expr1->symtree->n.sym;

  gfc_init_se (&lse, NULL);
  gfc_init_se (&rse, NULL);
  gfc_start_block (&block);

  gfc_conv_expr (&lse, code->expr1);
  gfc_add_block_to_block (&block, &lse.pre);
  type = TREE_TYPE (lse.expr);
  lhsaddr = gfc_build_addr_expr (NULL, lse.expr);

  expr2 = code->expr2;
  if (expr2->expr_type == EXPR_FUNCTION
      && expr2->value.function.isym->id == GFC_ISYM_CONVERSION)
    expr2 = expr2->value.function.actual->expr;

  if (expr2->expr_type == EXPR_OP)
    {
      gfc_expr *e;
      switch (expr2->value.op.op)
        {
        case INTRINSIC_PLUS:
          op = PLUS_EXPR;
          break;
        case INTRINSIC_TIMES:
          op = MULT_EXPR;
          break;
        case INTRINSIC_MINUS:
          op = MINUS_EXPR;
          break;
        case INTRINSIC_DIVIDE:
          if (expr2->ts.type == BT_INTEGER)
            op = TRUNC_DIV_EXPR;
          else
            op = RDIV_EXPR;
          break;
        case INTRINSIC_AND:
          op = TRUTH_ANDIF_EXPR;
          break;
        case INTRINSIC_OR:
          op = TRUTH_ORIF_EXPR;
          break;
        case INTRINSIC_EQV:
          op = EQ_EXPR;
          break;
        case INTRINSIC_NEQV:
          op = NE_EXPR;
          break;
        default:
          gcc_unreachable ();
        }
      e = expr2->value.op.op1;
      if (e->expr_type == EXPR_FUNCTION
          && e->value.function.isym->id == GFC_ISYM_CONVERSION)
        e = e->value.function.actual->expr;
      if (e->expr_type == EXPR_VARIABLE
          && e->symtree != NULL
          && e->symtree->n.sym == var)
        {
          expr2 = expr2->value.op.op2;
          var_on_left = true;
        }
      else
        {
          e = expr2->value.op.op2;
          if (e->expr_type == EXPR_FUNCTION
              && e->value.function.isym->id == GFC_ISYM_CONVERSION)
            e = e->value.function.actual->expr;
          gcc_assert (e->expr_type == EXPR_VARIABLE
                      && e->symtree != NULL
                      && e->symtree->n.sym == var);
          expr2 = expr2->value.op.op1;
          var_on_left = false;
        }
      gfc_conv_expr (&rse, expr2);
      gfc_add_block_to_block (&block, &rse.pre);
    }
  else
    {
      gcc_assert (expr2->expr_type == EXPR_FUNCTION);
      switch (expr2->value.function.isym->id)
        {
        case GFC_ISYM_MIN:
          op = MIN_EXPR;
          break;
        case GFC_ISYM_MAX:
          op = MAX_EXPR;
          break;
        case GFC_ISYM_IAND:
          op = BIT_AND_EXPR;
          break;
        case GFC_ISYM_IOR:
          op = BIT_IOR_EXPR;
          break;
        case GFC_ISYM_IEOR:
          op = BIT_XOR_EXPR;
          break;
        default:
          gcc_unreachable ();
        }
      e = expr2->value.function.actual->expr;
      gcc_assert (e->expr_type == EXPR_VARIABLE
                  && e->symtree != NULL
                  && e->symtree->n.sym == var);

      gfc_conv_expr (&rse, expr2->value.function.actual->next->expr);
      gfc_add_block_to_block (&block, &rse.pre);
      if (expr2->value.function.actual->next->next != NULL)
        {
          tree accum = gfc_create_var (TREE_TYPE (rse.expr), NULL);
          gfc_actual_arglist *arg;

          gfc_add_modify (&block, accum, rse.expr);
          for (arg = expr2->value.function.actual->next->next; arg;
               arg = arg->next)
            {
              gfc_init_block (&rse.pre);
              gfc_conv_expr (&rse, arg->expr);
              gfc_add_block_to_block (&block, &rse.pre);
              x = fold_build2 (op, TREE_TYPE (accum), accum, rse.expr);
              gfc_add_modify (&block, accum, x);
            }

          rse.expr = accum;
        }

      expr2 = expr2->value.function.actual->next->expr;
    }

  lhsaddr = save_expr (lhsaddr);
  rhs = gfc_evaluate_now (rse.expr, &block);
  x = convert (TREE_TYPE (rhs), build_fold_indirect_ref_loc (input_location,
                                                             lhsaddr));

  if (var_on_left)
    x = fold_build2 (op, TREE_TYPE (rhs), x, rhs);
  else
    x = fold_build2 (op, TREE_TYPE (rhs), rhs, x);

  if (TREE_CODE (TREE_TYPE (rhs)) == COMPLEX_TYPE
      && TREE_CODE (type) != COMPLEX_TYPE)
    x = fold_build1 (REALPART_EXPR, TREE_TYPE (TREE_TYPE (rhs)), x);

  x = build2_v (OMP_ATOMIC, lhsaddr, convert (type, x));
  gfc_add_expr_to_block (&block, x);

  gfc_add_block_to_block (&block, &lse.pre);
  gfc_add_block_to_block (&block, &rse.pre);

  return gfc_finish_block (&block);
}

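/* Translate !$OMP BARRIER into a call to the GOMP_barrier builtin.  */
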
static tree
gfc_trans_omp_barrier (void)
{
  tree decl = built_in_decls [BUILT_IN_GOMP_BARRIER];
  return build_call_expr_loc (input_location, decl, 0);
}

static tree
gfc_trans_omp_critical (gfc_code *code)
{
  tree name = NULL_TREE, stmt;
  if (code->ext.omp_name != NULL)
    name = get_identifier (code->ext.omp_name);
  stmt = gfc_trans_code (code->block->next);
  return build2 (OMP_CRITICAL, void_type_node, stmt, name);
}

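/* Translate !$OMP DO (possibly with COLLAPSE) into an OMP_FOR node.
   DO_CLAUSES are the clauses to put on the loop itself; PAR_CLAUSES is
   the clause chain of an enclosing combined PARALLEL construct, if any,
   and may be updated for lastprivate iteration variables.  */
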
static tree
gfc_trans_omp_do (gfc_code *code, stmtblock_t *pblock,
                  gfc_omp_clauses *do_clauses, tree par_clauses)
{
  gfc_se se;
  tree dovar, stmt, from, to, step, type, init, cond, incr;
  tree count = NULL_TREE, cycle_label, tmp, omp_clauses;
  stmtblock_t block;
  stmtblock_t body;
  gfc_omp_clauses *clauses = code->ext.omp_clauses;
  int i, collapse = clauses->collapse;
  tree dovar_init = NULL_TREE;

  if (collapse <= 0)
    collapse = 1;

  code = code->block->next;
  gcc_assert (code->op == EXEC_DO);

  init = make_tree_vec (collapse);
  cond = make_tree_vec (collapse);
  incr = make_tree_vec (collapse);

  if (pblock == NULL)
    {
      gfc_start_block (&block);
      pblock = &block;
    }

  omp_clauses = gfc_trans_omp_clauses (pblock, do_clauses, code->loc);

  for (i = 0; i < collapse; i++)
    {
      int simple = 0;
      int dovar_found = 0;
      tree dovar_decl;

      if (clauses)
        {
          gfc_namelist *n;
          for (n = clauses->lists[OMP_LIST_LASTPRIVATE]; n != NULL;
               n = n->next)
            if (code->ext.iterator->var->symtree->n.sym == n->sym)
              break;
          if (n != NULL)
            dovar_found = 1;
          else if (n == NULL)
            for (n = clauses->lists[OMP_LIST_PRIVATE]; n != NULL; n = n->next)
              if (code->ext.iterator->var->symtree->n.sym == n->sym)
                break;
          if (n != NULL)
            dovar_found++;
        }

      /* Evaluate all the expressions in the iterator.  */
      gfc_init_se (&se, NULL);
      gfc_conv_expr_lhs (&se, code->ext.iterator->var);
      gfc_add_block_to_block (pblock, &se.pre);
      dovar = se.expr;
      type = TREE_TYPE (dovar);
      gcc_assert (TREE_CODE (type) == INTEGER_TYPE);

      gfc_init_se (&se, NULL);
      gfc_conv_expr_val (&se, code->ext.iterator->start);
      gfc_add_block_to_block (pblock, &se.pre);
      from = gfc_evaluate_now (se.expr, pblock);

      gfc_init_se (&se, NULL);
      gfc_conv_expr_val (&se, code->ext.iterator->end);
      gfc_add_block_to_block (pblock, &se.pre);
      to = gfc_evaluate_now (se.expr, pblock);

      gfc_init_se (&se, NULL);
      gfc_conv_expr_val (&se, code->ext.iterator->step);
      gfc_add_block_to_block (pblock, &se.pre);
      step = gfc_evaluate_now (se.expr, pblock);
      dovar_decl = dovar;

      /* Special case simple loops.  */
      if (TREE_CODE (dovar) == VAR_DECL)
        {
          if (integer_onep (step))
            simple = 1;
          else if (tree_int_cst_equal (step, integer_minus_one_node))
            simple = -1;
        }
      else
        dovar_decl
          = gfc_trans_omp_variable (code->ext.iterator->var->symtree->n.sym);

      /* Loop body.  */
      if (simple)
        {
          TREE_VEC_ELT (init, i) = build2_v (MODIFY_EXPR, dovar, from);
          TREE_VEC_ELT (cond, i) = fold_build2 (simple > 0 ? LE_EXPR : GE_EXPR,
                                                boolean_type_node, dovar, to);
          TREE_VEC_ELT (incr, i) = fold_build2 (PLUS_EXPR, type, dovar, step);
          TREE_VEC_ELT (incr, i) = fold_build2 (MODIFY_EXPR, type, dovar,
                                                TREE_VEC_ELT (incr, i));
        }
      else
        {
          /* STEP is not 1 or -1.  Use:
             for (count = 0; count < (to + step - from) / step; count++)
               {
                 dovar = from + count * step;
                 body;
               cycle_label:;
               }  */
          tmp = fold_build2 (MINUS_EXPR, type, step, from);
          tmp = fold_build2 (PLUS_EXPR, type, to, tmp);
          tmp = fold_build2 (TRUNC_DIV_EXPR, type, tmp, step);
          tmp = gfc_evaluate_now (tmp, pblock);
          count = gfc_create_var (type, "count");
          TREE_VEC_ELT (init, i) = build2_v (MODIFY_EXPR, count,
                                             build_int_cst (type, 0));
          TREE_VEC_ELT (cond, i) = fold_build2 (LT_EXPR, boolean_type_node,
                                                count, tmp);
          TREE_VEC_ELT (incr, i) = fold_build2 (PLUS_EXPR, type, count,
                                                build_int_cst (type, 1));
          TREE_VEC_ELT (incr, i) = fold_build2 (MODIFY_EXPR, type,
                                                count, TREE_VEC_ELT (incr, i));

          /* Initialize DOVAR.  */
          tmp = fold_build2 (MULT_EXPR, type, count, step);
          tmp = fold_build2 (PLUS_EXPR, type, from, tmp);
          dovar_init = tree_cons (dovar, tmp, dovar_init);
        }

      if (!dovar_found)
        {
          tmp = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
          OMP_CLAUSE_DECL (tmp) = dovar_decl;
          omp_clauses = gfc_trans_add_clause (tmp, omp_clauses);
        }
      else if (dovar_found == 2)
        {
          tree c = NULL;

          tmp = NULL;
          if (!simple)
            {
              /* If dovar is lastprivate, but different counter is used,
                 dovar += step needs to be added to
                 OMP_CLAUSE_LASTPRIVATE_STMT, otherwise the copied dovar
                 will have the value on entry of the last loop, rather
                 than value after iterator increment.  */
              tmp = gfc_evaluate_now (step, pblock);
              tmp = fold_build2 (PLUS_EXPR, type, dovar, tmp);
              tmp = fold_build2 (MODIFY_EXPR, type, dovar, tmp);
              for (c = omp_clauses; c ; c = OMP_CLAUSE_CHAIN (c))
                if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
                    && OMP_CLAUSE_DECL (c) == dovar_decl)
                  {
                    OMP_CLAUSE_LASTPRIVATE_STMT (c) = tmp;
                    break;
                  }
            }
          if (c == NULL && par_clauses != NULL)
            {
              for (c = par_clauses; c ; c = OMP_CLAUSE_CHAIN (c))
                if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
                    && OMP_CLAUSE_DECL (c) == dovar_decl)
                  {
                    tree l = build_omp_clause (input_location,
                                               OMP_CLAUSE_LASTPRIVATE);
                    OMP_CLAUSE_DECL (l) = dovar_decl;
                    OMP_CLAUSE_CHAIN (l) = omp_clauses;
                    OMP_CLAUSE_LASTPRIVATE_STMT (l) = tmp;
                    omp_clauses = l;
                    OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_SHARED);
                    break;
                  }
            }
          gcc_assert (simple || c != NULL);
        }
      if (!simple)
        {
          tmp = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
          OMP_CLAUSE_DECL (tmp) = count;
          omp_clauses = gfc_trans_add_clause (tmp, omp_clauses);
        }

      if (i + 1 < collapse)
        code = code->block->next;
    }

  if (pblock != &block)
    {
      pushlevel (0);
      gfc_start_block (&block);
    }

  gfc_start_block (&body);

  dovar_init = nreverse (dovar_init);
  while (dovar_init)
    {
      gfc_add_modify (&body, TREE_PURPOSE (dovar_init),
                      TREE_VALUE (dovar_init));
      dovar_init = TREE_CHAIN (dovar_init);
    }

  /* Cycle statement is implemented with a goto.  Exit statement must not be
     present for this loop.  */
  cycle_label = gfc_build_label_decl (NULL_TREE);

  /* Put these labels where they can be found later.  We put the
     labels in a TREE_LIST node (because TREE_CHAIN is already
     used).  cycle_label goes in TREE_PURPOSE (backend_decl), exit
     label in TREE_VALUE (backend_decl).  */

  code->block->backend_decl = tree_cons (cycle_label, NULL, NULL);

  /* Main loop body.  */
  tmp = gfc_trans_omp_code (code->block->next, true);
  gfc_add_expr_to_block (&body, tmp);

  /* Label for cycle statements (if needed).  */
  if (TREE_USED (cycle_label))
    {
      tmp = build1_v (LABEL_EXPR, cycle_label);
      gfc_add_expr_to_block (&body, tmp);
    }

  /* End of loop body.  */
  stmt = make_node (OMP_FOR);

  TREE_TYPE (stmt) = void_type_node;
  OMP_FOR_BODY (stmt) = gfc_finish_block (&body);
  OMP_FOR_CLAUSES (stmt) = omp_clauses;
  OMP_FOR_INIT (stmt) = init;
  OMP_FOR_COND (stmt) = cond;
  OMP_FOR_INCR (stmt) = incr;
  gfc_add_expr_to_block (&block, stmt);

  return gfc_finish_block (&block);
}

static tree
gfc_trans_omp_flush (void)
{
  tree decl = built_in_decls [BUILT_IN_SYNCHRONIZE];
  return build_call_expr_loc (input_location, decl, 0);
}

static tree
gfc_trans_omp_master (gfc_code *code)
{
  tree stmt = gfc_trans_code (code->block->next);
  if (IS_EMPTY_STMT (stmt))
    return stmt;
  return build1_v (OMP_MASTER, stmt);
}

static tree
gfc_trans_omp_ordered (gfc_code *code)
{
  return build1_v (OMP_ORDERED, gfc_trans_code (code->block->next));
}

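/* Translate !$OMP PARALLEL by wrapping the translated body and clauses
   in an OMP_PARALLEL node.  */
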
static tree
gfc_trans_omp_parallel (gfc_code *code)
{
  stmtblock_t block;
  tree stmt, omp_clauses;

  gfc_start_block (&block);
  omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
                                       code->loc);
  stmt = gfc_trans_omp_code (code->block->next, true);
  stmt = build2 (OMP_PARALLEL, void_type_node, stmt, omp_clauses);
  gfc_add_expr_to_block (&block, stmt);
  return gfc_finish_block (&block);
}

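/* Translate the combined !$OMP PARALLEL DO construct, splitting the
   clauses between the parallel region and the worksharing loop.  */
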
static tree
|
1429 |
|
|
gfc_trans_omp_parallel_do (gfc_code *code)
|
1430 |
|
|
{
|
1431 |
|
|
stmtblock_t block, *pblock = NULL;
|
1432 |
|
|
gfc_omp_clauses parallel_clauses, do_clauses;
|
1433 |
|
|
tree stmt, omp_clauses = NULL_TREE;
|
1434 |
|
|
|
1435 |
|
|
gfc_start_block (&block);
|
1436 |
|
|
|
1437 |
|
|
memset (&do_clauses, 0, sizeof (do_clauses));
|
1438 |
|
|
if (code->ext.omp_clauses != NULL)
|
1439 |
|
|
{
|
1440 |
|
|
memcpy (¶llel_clauses, code->ext.omp_clauses,
|
1441 |
|
|
sizeof (parallel_clauses));
|
1442 |
|
|
do_clauses.sched_kind = parallel_clauses.sched_kind;
|
1443 |
|
|
do_clauses.chunk_size = parallel_clauses.chunk_size;
|
1444 |
|
|
do_clauses.ordered = parallel_clauses.ordered;
|
1445 |
|
|
do_clauses.collapse = parallel_clauses.collapse;
|
1446 |
|
|
parallel_clauses.sched_kind = OMP_SCHED_NONE;
|
1447 |
|
|
parallel_clauses.chunk_size = NULL;
|
1448 |
|
|
parallel_clauses.ordered = false;
|
1449 |
|
|
parallel_clauses.collapse = 0;
|
1450 |
|
|
omp_clauses = gfc_trans_omp_clauses (&block, ¶llel_clauses,
|
1451 |
|
|
code->loc);
|
1452 |
|
|
}
|
1453 |
|
|
do_clauses.nowait = true;
|
1454 |
|
|
if (!do_clauses.ordered && do_clauses.sched_kind != OMP_SCHED_STATIC)
|
1455 |
|
|
pblock = █
|
1456 |
|
|
else
|
1457 |
|
|
pushlevel (0);
|
1458 |
|
|
stmt = gfc_trans_omp_do (code, pblock, &do_clauses, omp_clauses);
|
1459 |
|
|
if (TREE_CODE (stmt) != BIND_EXPR)
|
1460 |
|
|
stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0, 0));
|
1461 |
|
|
else
|
1462 |
|
|
poplevel (0, 0, 0);
|
1463 |
|
|
stmt = build2 (OMP_PARALLEL, void_type_node, stmt, omp_clauses);
|
1464 |
|
|
OMP_PARALLEL_COMBINED (stmt) = 1;
|
1465 |
|
|
gfc_add_expr_to_block (&block, stmt);
|
1466 |
|
|
return gfc_finish_block (&block);
|
1467 |
|
|
}
|
1468 |
|
|
|
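/* Translate the combined !$OMP PARALLEL SECTIONS construct: all clauses
   are attached to the OMP_PARALLEL node and the inner sections construct
   is emitted with nowait.  */
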
static tree
gfc_trans_omp_parallel_sections (gfc_code *code)
{
  stmtblock_t block;
  gfc_omp_clauses section_clauses;
  tree stmt, omp_clauses;

  memset (&section_clauses, 0, sizeof (section_clauses));
  section_clauses.nowait = true;

  gfc_start_block (&block);
  omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
                                       code->loc);
  pushlevel (0);
  stmt = gfc_trans_omp_sections (code, &section_clauses);
  if (TREE_CODE (stmt) != BIND_EXPR)
    stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0, 0));
  else
    poplevel (0, 0, 0);
  stmt = build2 (OMP_PARALLEL, void_type_node, stmt, omp_clauses);
  OMP_PARALLEL_COMBINED (stmt) = 1;
  gfc_add_expr_to_block (&block, stmt);
  return gfc_finish_block (&block);
}

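/* Translate the combined !$OMP PARALLEL WORKSHARE construct, analogously
   to PARALLEL SECTIONS: clauses go on the OMP_PARALLEL node and the inner
   workshare is emitted with nowait.  */
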
static tree
gfc_trans_omp_parallel_workshare (gfc_code *code)
{
  stmtblock_t block;
  gfc_omp_clauses workshare_clauses;
  tree stmt, omp_clauses;

  memset (&workshare_clauses, 0, sizeof (workshare_clauses));
  workshare_clauses.nowait = true;

  gfc_start_block (&block);
  omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
                                       code->loc);
  pushlevel (0);
  stmt = gfc_trans_omp_workshare (code, &workshare_clauses);
  if (TREE_CODE (stmt) != BIND_EXPR)
    stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0, 0));
  else
    poplevel (0, 0, 0);
  stmt = build2 (OMP_PARALLEL, void_type_node, stmt, omp_clauses);
  OMP_PARALLEL_COMBINED (stmt) = 1;
  gfc_add_expr_to_block (&block, stmt);
  return gfc_finish_block (&block);
}

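/* Translate !$OMP SECTIONS ... !$OMP END SECTIONS.  Each section body is
   wrapped in an OMP_SECTION node and chained into an OMP_SECTIONS node;
   when a lastprivate clause is present, the last section is chained in
   even if its body is empty.  */
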
static tree
gfc_trans_omp_sections (gfc_code *code, gfc_omp_clauses *clauses)
{
  stmtblock_t block, body;
  tree omp_clauses, stmt;
  bool has_lastprivate = clauses->lists[OMP_LIST_LASTPRIVATE] != NULL;

  gfc_start_block (&block);

  omp_clauses = gfc_trans_omp_clauses (&block, clauses, code->loc);

  gfc_init_block (&body);
  for (code = code->block; code; code = code->block)
    {
      /* Last section is special because of lastprivate, so even if it
         is empty, chain it in.  */
      stmt = gfc_trans_omp_code (code->next,
                                 has_lastprivate && code->block == NULL);
      if (! IS_EMPTY_STMT (stmt))
        {
          stmt = build1_v (OMP_SECTION, stmt);
          gfc_add_expr_to_block (&body, stmt);
        }
    }
  stmt = gfc_finish_block (&body);

  stmt = build2 (OMP_SECTIONS, void_type_node, stmt, omp_clauses);
  gfc_add_expr_to_block (&block, stmt);

  return gfc_finish_block (&block);
}

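/* Translate !$OMP SINGLE ... !$OMP END SINGLE into an OMP_SINGLE node
   carrying the translated clauses.  */
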
static tree
gfc_trans_omp_single (gfc_code *code, gfc_omp_clauses *clauses)
{
  tree omp_clauses = gfc_trans_omp_clauses (NULL, clauses, code->loc);
  tree stmt = gfc_trans_omp_code (code->block->next, true);
  stmt = build2 (OMP_SINGLE, void_type_node, stmt, omp_clauses);
  return stmt;
}

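/* Translate !$OMP TASK ... !$OMP END TASK into an OMP_TASK node with the
   translated clauses attached.  */
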
static tree
gfc_trans_omp_task (gfc_code *code)
{
  stmtblock_t block;
  tree stmt, omp_clauses;

  gfc_start_block (&block);
  omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
                                       code->loc);
  stmt = gfc_trans_omp_code (code->block->next, true);
  stmt = build2 (OMP_TASK, void_type_node, stmt, omp_clauses);
  gfc_add_expr_to_block (&block, stmt);
  return gfc_finish_block (&block);
}

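/* Translate !$OMP TASKWAIT into a call to the BUILT_IN_GOMP_TASKWAIT
   builtin.  */
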
static tree
gfc_trans_omp_taskwait (void)
{
  tree decl = built_in_decls [BUILT_IN_GOMP_TASKWAIT];
  return build_call_expr_loc (input_location, decl, 0);
}

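/* Translate !$OMP WORKSHARE ... !$OMP END WORKSHARE.  Statements the
   scalarizer can workshare (array assignments, WHERE, FORALL) are
   translated with ompws_flags set; consecutive statements that remain a
   single unit of work are collected into one OMP_SINGLE so that only one
   thread executes them.  Illustrative Fortran input (not taken from any
   testcase):

       !$omp workshare
       a(:) = b(:) + c(:)   ! may be divided among the threads
       s = 1.0              ! scalar assignment, wrapped in OMP_SINGLE
       !$omp end workshare  */
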
static tree
gfc_trans_omp_workshare (gfc_code *code, gfc_omp_clauses *clauses)
{
  tree res, tmp, stmt;
  stmtblock_t block, *pblock = NULL;
  stmtblock_t singleblock;
  int saved_ompws_flags;
  bool singleblock_in_progress = false;
  /* True if previous gfc_code in workshare construct is not workshared.  */
  bool prev_singleunit;

  code = code->block->next;

  pushlevel (0);

  if (!code)
    return build_empty_stmt (input_location);

  gfc_start_block (&block);
  pblock = &block;

  ompws_flags = OMPWS_WORKSHARE_FLAG;
  prev_singleunit = false;

  /* Translate statements one by one to trees until we reach
     the end of the workshare construct.  Adjacent gfc_codes that
     are a single unit of work are clustered and encapsulated in a
     single OMP_SINGLE construct.  */
  for (; code; code = code->next)
    {
      if (code->here != 0)
        {
          res = gfc_trans_label_here (code);
          gfc_add_expr_to_block (pblock, res);
        }

      /* No dependence analysis, use for clauses with wait.
         If this is the last gfc_code, use default omp_clauses.  */
      if (code->next == NULL && clauses->nowait)
        ompws_flags |= OMPWS_NOWAIT;

      /* By default, every gfc_code is a single unit of work.  */
      ompws_flags |= OMPWS_CURR_SINGLEUNIT;
      ompws_flags &= ~OMPWS_SCALARIZER_WS;

      switch (code->op)
        {
        case EXEC_NOP:
          res = NULL_TREE;
          break;

        case EXEC_ASSIGN:
          res = gfc_trans_assign (code);
          break;

        case EXEC_POINTER_ASSIGN:
          res = gfc_trans_pointer_assign (code);
          break;

        case EXEC_INIT_ASSIGN:
          res = gfc_trans_init_assign (code);
          break;

        case EXEC_FORALL:
          res = gfc_trans_forall (code);
          break;

        case EXEC_WHERE:
          res = gfc_trans_where (code);
          break;

        case EXEC_OMP_ATOMIC:
          res = gfc_trans_omp_directive (code);
          break;

        case EXEC_OMP_PARALLEL:
        case EXEC_OMP_PARALLEL_DO:
        case EXEC_OMP_PARALLEL_SECTIONS:
        case EXEC_OMP_PARALLEL_WORKSHARE:
        case EXEC_OMP_CRITICAL:
          saved_ompws_flags = ompws_flags;
          ompws_flags = 0;
          res = gfc_trans_omp_directive (code);
          ompws_flags = saved_ompws_flags;
          break;

        default:
          internal_error ("gfc_trans_omp_workshare(): Bad statement code");
        }

      gfc_set_backend_locus (&code->loc);

      if (res != NULL_TREE && ! IS_EMPTY_STMT (res))
        {
          if (prev_singleunit)
            {
              if (ompws_flags & OMPWS_CURR_SINGLEUNIT)
                /* Add current gfc_code to single block.  */
                gfc_add_expr_to_block (&singleblock, res);
              else
                {
                  /* Finish single block and add it to pblock.  */
                  tmp = gfc_finish_block (&singleblock);
                  tmp = build2 (OMP_SINGLE, void_type_node, tmp, NULL_TREE);
                  gfc_add_expr_to_block (pblock, tmp);
                  /* Add current gfc_code to pblock.  */
                  gfc_add_expr_to_block (pblock, res);
                  singleblock_in_progress = false;
                }
            }
          else
            {
              if (ompws_flags & OMPWS_CURR_SINGLEUNIT)
                {
                  /* Start single block.  */
                  gfc_init_block (&singleblock);
                  gfc_add_expr_to_block (&singleblock, res);
                  singleblock_in_progress = true;
                }
              else
                /* Add the new statement to the block.  */
                gfc_add_expr_to_block (pblock, res);
            }
          prev_singleunit = (ompws_flags & OMPWS_CURR_SINGLEUNIT) != 0;
        }
    }

  /* Finish remaining SINGLE block, if we were in the middle of one.  */
  if (singleblock_in_progress)
    {
      /* Finish single block and add it to pblock.  */
      tmp = gfc_finish_block (&singleblock);
      tmp = build2 (OMP_SINGLE, void_type_node, tmp,
                    clauses->nowait
                    ? build_omp_clause (input_location, OMP_CLAUSE_NOWAIT)
                    : NULL_TREE);
      gfc_add_expr_to_block (pblock, tmp);
    }

  stmt = gfc_finish_block (pblock);
  if (TREE_CODE (stmt) != BIND_EXPR)
    {
      if (!IS_EMPTY_STMT (stmt))
        {
          tree bindblock = poplevel (1, 0, 0);
          stmt = build3_v (BIND_EXPR, NULL, stmt, bindblock);
        }
      else
        poplevel (0, 0, 0);
    }
  else
    poplevel (0, 0, 0);

  ompws_flags = 0;
  return stmt;
}

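/* Entry point for translating an OpenMP directive: dispatch CODE to the
   translation routine for its statement code.  */
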
tree
gfc_trans_omp_directive (gfc_code *code)
{
  switch (code->op)
    {
    case EXEC_OMP_ATOMIC:
      return gfc_trans_omp_atomic (code);
    case EXEC_OMP_BARRIER:
      return gfc_trans_omp_barrier ();
    case EXEC_OMP_CRITICAL:
      return gfc_trans_omp_critical (code);
    case EXEC_OMP_DO:
      return gfc_trans_omp_do (code, NULL, code->ext.omp_clauses, NULL);
    case EXEC_OMP_FLUSH:
      return gfc_trans_omp_flush ();
    case EXEC_OMP_MASTER:
      return gfc_trans_omp_master (code);
    case EXEC_OMP_ORDERED:
      return gfc_trans_omp_ordered (code);
    case EXEC_OMP_PARALLEL:
      return gfc_trans_omp_parallel (code);
    case EXEC_OMP_PARALLEL_DO:
      return gfc_trans_omp_parallel_do (code);
    case EXEC_OMP_PARALLEL_SECTIONS:
      return gfc_trans_omp_parallel_sections (code);
    case EXEC_OMP_PARALLEL_WORKSHARE:
      return gfc_trans_omp_parallel_workshare (code);
    case EXEC_OMP_SECTIONS:
      return gfc_trans_omp_sections (code, code->ext.omp_clauses);
    case EXEC_OMP_SINGLE:
      return gfc_trans_omp_single (code, code->ext.omp_clauses);
    case EXEC_OMP_TASK:
      return gfc_trans_omp_task (code);
    case EXEC_OMP_TASKWAIT:
      return gfc_trans_omp_taskwait ();
    case EXEC_OMP_WORKSHARE:
      return gfc_trans_omp_workshare (code, code->ext.omp_clauses);
    default:
      gcc_unreachable ();
    }
}