/* OpenMP directive translation -- generate GCC trees from gfc_code.
   Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012
   Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "gimple.h"		/* For create_tmp_var_raw.  */
#include "diagnostic-core.h"	/* For internal_error.  */
#include "gfortran.h"
#include "trans.h"
#include "trans-stmt.h"
#include "trans-types.h"
#include "trans-array.h"
#include "trans-const.h"
#include "arith.h"

int ompws_flags;

/* True if OpenMP should privatize what this DECL points to rather
   than the DECL itself.  */

bool
gfc_omp_privatize_by_reference (const_tree decl)
{
  tree type = TREE_TYPE (decl);

  if (TREE_CODE (type) == REFERENCE_TYPE
      && (!DECL_ARTIFICIAL (decl) || TREE_CODE (decl) == PARM_DECL))
    return true;

  if (TREE_CODE (type) == POINTER_TYPE)
    {
      /* Array POINTER/ALLOCATABLE have aggregate types, all user variables
	 that have POINTER_TYPE type and don't have GFC_POINTER_TYPE_P
	 set are supposed to be privatized by reference.  */
      if (GFC_POINTER_TYPE_P (type))
	return false;

      if (!DECL_ARTIFICIAL (decl)
	  && TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE)
	return true;

      /* Some arrays are expanded as DECL_ARTIFICIAL pointers
	 by the frontend.  */
      if (DECL_LANG_SPECIFIC (decl)
	  && GFC_DECL_SAVED_DESCRIPTOR (decl))
	return true;
    }

  return false;
}

/* True if OpenMP sharing attribute of DECL is predetermined.  */

enum omp_clause_default_kind
gfc_omp_predetermined_sharing (tree decl)
{
  if (DECL_ARTIFICIAL (decl)
      && ! GFC_DECL_RESULT (decl)
      && ! (DECL_LANG_SPECIFIC (decl)
	    && GFC_DECL_SAVED_DESCRIPTOR (decl)))
    return OMP_CLAUSE_DEFAULT_SHARED;

  /* Cray pointees shouldn't be listed in any clauses and should be
     gimplified to dereference of the corresponding Cray pointer.
     Make them all private, so that they are emitted in the debug
     information.  */
  if (GFC_DECL_CRAY_POINTEE (decl))
    return OMP_CLAUSE_DEFAULT_PRIVATE;

  /* Assumed-size arrays are predetermined shared.  */
  if (TREE_CODE (decl) == PARM_DECL
      && GFC_ARRAY_TYPE_P (TREE_TYPE (decl))
      && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl)) == GFC_ARRAY_UNKNOWN
      && GFC_TYPE_ARRAY_UBOUND (TREE_TYPE (decl),
				GFC_TYPE_ARRAY_RANK (TREE_TYPE (decl)) - 1)
	 == NULL)
    return OMP_CLAUSE_DEFAULT_SHARED;

  /* Dummy procedures aren't considered variables by OpenMP, thus are
     disallowed in OpenMP clauses.  They are represented as PARM_DECLs
     in the middle-end, so return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE here
     to avoid complaining about their uses with default(none).  */
  if (TREE_CODE (decl) == PARM_DECL
      && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == FUNCTION_TYPE)
    return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;

  /* COMMON and EQUIVALENCE decls are shared.  They
     are only referenced through DECL_VALUE_EXPR of the variables
     contained in them.  If those are privatized, they will not be
     gimplified to the COMMON or EQUIVALENCE decls.  */
  if (GFC_DECL_COMMON_OR_EQUIV (decl) && ! DECL_HAS_VALUE_EXPR_P (decl))
    return OMP_CLAUSE_DEFAULT_SHARED;

  if (GFC_DECL_RESULT (decl) && ! DECL_HAS_VALUE_EXPR_P (decl))
    return OMP_CLAUSE_DEFAULT_SHARED;

  return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
}

/* Return decl that should be used when reporting DEFAULT(NONE)
   diagnostics.  */

tree
gfc_omp_report_decl (tree decl)
{
  if (DECL_ARTIFICIAL (decl)
      && DECL_LANG_SPECIFIC (decl)
      && GFC_DECL_SAVED_DESCRIPTOR (decl))
    return GFC_DECL_SAVED_DESCRIPTOR (decl);

  return decl;
}

/* Return true if DECL in private clause needs
   OMP_CLAUSE_PRIVATE_OUTER_REF on the private clause.  */
bool
gfc_omp_private_outer_ref (tree decl)
{
  tree type = TREE_TYPE (decl);

  if (GFC_DESCRIPTOR_TYPE_P (type)
      && GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ALLOCATABLE)
    return true;

  return false;
}

/* Return code to initialize DECL with its default constructor, or
   NULL if there's nothing to do.  */

tree
gfc_omp_clause_default_ctor (tree clause, tree decl, tree outer)
{
  tree type = TREE_TYPE (decl), rank, size, esize, ptr, cond, then_b, else_b;
  stmtblock_t block, cond_block;

  if (! GFC_DESCRIPTOR_TYPE_P (type)
      || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
    return NULL;

  gcc_assert (outer != NULL);
  gcc_assert (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_PRIVATE
	      || OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_LASTPRIVATE);

  /* Allocatable arrays in PRIVATE clauses need to be set to
     "not currently allocated" allocation status if outer
     array is "not currently allocated", otherwise should be allocated.  */
  gfc_start_block (&block);

  gfc_init_block (&cond_block);

  gfc_add_modify (&cond_block, decl, outer);
  rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
  size = gfc_conv_descriptor_ubound_get (decl, rank);
  size = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
			  size, gfc_conv_descriptor_lbound_get (decl, rank));
  size = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
			  size, gfc_index_one_node);
  if (GFC_TYPE_ARRAY_RANK (type) > 1)
    size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
			    size, gfc_conv_descriptor_stride_get (decl, rank));
  esize = fold_convert (gfc_array_index_type,
			TYPE_SIZE_UNIT (gfc_get_element_type (type)));
  size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
			  size, esize);
  size = gfc_evaluate_now (fold_convert (size_type_node, size), &cond_block);

  ptr = gfc_create_var (pvoid_type_node, NULL);
  gfc_allocate_using_malloc (&cond_block, ptr, size, NULL_TREE);
  gfc_conv_descriptor_data_set (&cond_block, decl, ptr);

  then_b = gfc_finish_block (&cond_block);

  gfc_init_block (&cond_block);
  gfc_conv_descriptor_data_set (&cond_block, decl, null_pointer_node);
  else_b = gfc_finish_block (&cond_block);

  cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
			  fold_convert (pvoid_type_node,
					gfc_conv_descriptor_data_get (outer)),
			  null_pointer_node);
  gfc_add_expr_to_block (&block, build3_loc (input_location, COND_EXPR,
			 void_type_node, cond, then_b, else_b));

  return gfc_finish_block (&block);
}

/* Build and return code for a copy constructor from SRC to DEST.  */

tree
gfc_omp_clause_copy_ctor (tree clause, tree dest, tree src)
{
  tree type = TREE_TYPE (dest), ptr, size, esize, rank, call;
  tree cond, then_b, else_b;
  stmtblock_t block, cond_block;

  if (! GFC_DESCRIPTOR_TYPE_P (type)
      || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
    return build2_v (MODIFY_EXPR, dest, src);

  gcc_assert (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_FIRSTPRIVATE);

  /* Allocatable arrays in FIRSTPRIVATE clauses need to be allocated
     and copied from SRC.  */
  gfc_start_block (&block);

  gfc_init_block (&cond_block);

  gfc_add_modify (&cond_block, dest, src);
  rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
  size = gfc_conv_descriptor_ubound_get (dest, rank);
  size = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
			  size, gfc_conv_descriptor_lbound_get (dest, rank));
  size = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
			  size, gfc_index_one_node);
  if (GFC_TYPE_ARRAY_RANK (type) > 1)
    size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
			    size, gfc_conv_descriptor_stride_get (dest, rank));
  esize = fold_convert (gfc_array_index_type,
			TYPE_SIZE_UNIT (gfc_get_element_type (type)));
  size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
			  size, esize);
  size = gfc_evaluate_now (fold_convert (size_type_node, size), &cond_block);

  ptr = gfc_create_var (pvoid_type_node, NULL);
  gfc_allocate_using_malloc (&cond_block, ptr, size, NULL_TREE);
  gfc_conv_descriptor_data_set (&cond_block, dest, ptr);

  call = build_call_expr_loc (input_location,
			      builtin_decl_explicit (BUILT_IN_MEMCPY),
			      3, ptr,
			      fold_convert (pvoid_type_node,
					    gfc_conv_descriptor_data_get (src)),
			      size);
  gfc_add_expr_to_block (&cond_block, fold_convert (void_type_node, call));
  then_b = gfc_finish_block (&cond_block);

  gfc_init_block (&cond_block);
  gfc_conv_descriptor_data_set (&cond_block, dest, null_pointer_node);
  else_b = gfc_finish_block (&cond_block);

  cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
			  fold_convert (pvoid_type_node,
					gfc_conv_descriptor_data_get (src)),
			  null_pointer_node);
  gfc_add_expr_to_block (&block, build3_loc (input_location, COND_EXPR,
			 void_type_node, cond, then_b, else_b));

  return gfc_finish_block (&block);
}

/* Similarly, except use an assignment operator instead.  */

tree
gfc_omp_clause_assign_op (tree clause ATTRIBUTE_UNUSED, tree dest, tree src)
{
  tree type = TREE_TYPE (dest), rank, size, esize, call;
  stmtblock_t block;

  if (! GFC_DESCRIPTOR_TYPE_P (type)
      || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
    return build2_v (MODIFY_EXPR, dest, src);

  /* Handle copying allocatable arrays.  */
  gfc_start_block (&block);

  rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
  size = gfc_conv_descriptor_ubound_get (dest, rank);
  size = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
			  size, gfc_conv_descriptor_lbound_get (dest, rank));
  size = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
			  size, gfc_index_one_node);
  if (GFC_TYPE_ARRAY_RANK (type) > 1)
    size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
			    size, gfc_conv_descriptor_stride_get (dest, rank));
  esize = fold_convert (gfc_array_index_type,
			TYPE_SIZE_UNIT (gfc_get_element_type (type)));
  size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
			  size, esize);
  size = gfc_evaluate_now (fold_convert (size_type_node, size), &block);
  call = build_call_expr_loc (input_location,
			      builtin_decl_explicit (BUILT_IN_MEMCPY), 3,
			      fold_convert (pvoid_type_node,
					    gfc_conv_descriptor_data_get (dest)),
			      fold_convert (pvoid_type_node,
					    gfc_conv_descriptor_data_get (src)),
			      size);
  gfc_add_expr_to_block (&block, fold_convert (void_type_node, call));

  return gfc_finish_block (&block);
}

/* Build and return code destructing DECL.  Return NULL if nothing
   to be done.  */

tree
gfc_omp_clause_dtor (tree clause ATTRIBUTE_UNUSED, tree decl)
{
  tree type = TREE_TYPE (decl);

  if (! GFC_DESCRIPTOR_TYPE_P (type)
      || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
    return NULL;

  /* Allocatable arrays in FIRSTPRIVATE/LASTPRIVATE etc. clauses need
     to be deallocated if they were allocated.  */
  return gfc_trans_dealloc_allocated (decl, false);
}


/* Return true if DECL's DECL_VALUE_EXPR (if any) should be
   disregarded in OpenMP construct, because it is going to be
   remapped during OpenMP lowering.  SHARED is true if DECL
   is going to be shared, false if it is going to be privatized.  */

bool
gfc_omp_disregard_value_expr (tree decl, bool shared)
{
  if (GFC_DECL_COMMON_OR_EQUIV (decl)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree value = DECL_VALUE_EXPR (decl);

      if (TREE_CODE (value) == COMPONENT_REF
	  && TREE_CODE (TREE_OPERAND (value, 0)) == VAR_DECL
	  && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value, 0)))
	{
	  /* If variable in COMMON or EQUIVALENCE is privatized, return
	     true, as just that variable is supposed to be privatized,
	     not the whole COMMON or whole EQUIVALENCE.
	     For shared variables in COMMON or EQUIVALENCE, let them be
	     gimplified to DECL_VALUE_EXPR, so that for multiple shared vars
	     from the same COMMON or EQUIVALENCE just one sharing of the
	     whole COMMON or EQUIVALENCE is enough.  */
	  return ! shared;
	}
    }

  if (GFC_DECL_RESULT (decl) && DECL_HAS_VALUE_EXPR_P (decl))
    return ! shared;

  return false;
}

/* Return true if DECL that is shared iff SHARED is true should
   be put into OMP_CLAUSE_PRIVATE with OMP_CLAUSE_PRIVATE_DEBUG
   flag set.  */

bool
gfc_omp_private_debug_clause (tree decl, bool shared)
{
  if (GFC_DECL_CRAY_POINTEE (decl))
    return true;

  if (GFC_DECL_COMMON_OR_EQUIV (decl)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree value = DECL_VALUE_EXPR (decl);

      if (TREE_CODE (value) == COMPONENT_REF
	  && TREE_CODE (TREE_OPERAND (value, 0)) == VAR_DECL
	  && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value, 0)))
	return shared;
    }

  return false;
}

/* Register language specific type size variables as potentially OpenMP
   firstprivate variables.  */

void
gfc_omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
{
  if (GFC_ARRAY_TYPE_P (type) || GFC_DESCRIPTOR_TYPE_P (type))
    {
      int r;

      gcc_assert (TYPE_LANG_SPECIFIC (type) != NULL);
      for (r = 0; r < GFC_TYPE_ARRAY_RANK (type); r++)
	{
	  omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_LBOUND (type, r));
	  omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_UBOUND (type, r));
	  omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_STRIDE (type, r));
	}
      omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_SIZE (type));
      omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_OFFSET (type));
    }
}

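/* Added descriptive comment (not in the original source): chain the OpenMP
   clause NODE onto the clause list TAIL and return NODE as the new list
   head.  */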
static inline tree
gfc_trans_add_clause (tree node, tree tail)
{
  OMP_CLAUSE_CHAIN (node) = tail;
  return node;
}

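/* Added descriptive comment (not in the original source): return the backend
   decl to use for symbol SYM inside an OpenMP clause.  Function results and
   results of alternate ENTRY points are mapped to the fake result decl of the
   current (or parent) function.  */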
static tree
gfc_trans_omp_variable (gfc_symbol *sym)
{
  tree t = gfc_get_symbol_decl (sym);
  tree parent_decl;
  int parent_flag;
  bool return_value;
  bool alternate_entry;
  bool entry_master;

  return_value = sym->attr.function && sym->result == sym;
  alternate_entry = sym->attr.function && sym->attr.entry
		    && sym->result == sym;
  entry_master = sym->attr.result
		 && sym->ns->proc_name->attr.entry_master
		 && !gfc_return_by_reference (sym->ns->proc_name);
  parent_decl = DECL_CONTEXT (current_function_decl);

  if ((t == parent_decl && return_value)
       || (sym->ns && sym->ns->proc_name
	   && sym->ns->proc_name->backend_decl == parent_decl
	   && (alternate_entry || entry_master)))
    parent_flag = 1;
  else
    parent_flag = 0;

  /* Special case for assigning the return value of a function.
     Self recursive functions must have an explicit return value.  */
  if (return_value && (t == current_function_decl || parent_flag))
    t = gfc_get_fake_result_decl (sym, parent_flag);

  /* Similarly for alternate entry points.  */
  else if (alternate_entry
	   && (sym->ns->proc_name->backend_decl == current_function_decl
	       || parent_flag))
    {
      gfc_entry_list *el = NULL;

      for (el = sym->ns->entries; el; el = el->next)
	if (sym == el->sym)
	  {
	    t = gfc_get_fake_result_decl (sym, parent_flag);
	    break;
	  }
    }

  else if (entry_master
	   && (sym->ns->proc_name->backend_decl == current_function_decl
	       || parent_flag))
    t = gfc_get_fake_result_decl (sym, parent_flag);

  return t;
}

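/* Added descriptive comment (not in the original source): build an OpenMP
   clause of kind CODE for every referenced symbol in NAMELIST and chain the
   new clauses onto LIST.  */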
static tree
gfc_trans_omp_variable_list (enum omp_clause_code code, gfc_namelist *namelist,
			     tree list)
{
  for (; namelist != NULL; namelist = namelist->next)
    if (namelist->sym->attr.referenced)
      {
	tree t = gfc_trans_omp_variable (namelist->sym);
	if (t != error_mark_node)
	  {
	    tree node = build_omp_clause (input_location, code);
	    OMP_CLAUSE_DECL (node) = t;
	    list = gfc_trans_add_clause (node, list);
	  }
      }
  return list;
}

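/* Added descriptive comment (not in the original source): fill in the
   OMP_CLAUSE_REDUCTION_INIT, OMP_CLAUSE_REDUCTION_MERGE and
   OMP_CLAUSE_REDUCTION_PLACEHOLDER of clause C for an array reduction of
   symbol SYM.  Fake symbols, symtrees and expressions are built so that the
   initializer and the merge statement can be translated as ordinary Fortran
   assignments, allocating and deallocating allocatable arrays as needed.  */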
static void
gfc_trans_omp_array_reduction (tree c, gfc_symbol *sym, locus where)
{
  gfc_symtree *root1 = NULL, *root2 = NULL, *root3 = NULL, *root4 = NULL;
  gfc_symtree *symtree1, *symtree2, *symtree3, *symtree4 = NULL;
  gfc_symbol init_val_sym, outer_sym, intrinsic_sym;
  gfc_expr *e1, *e2, *e3, *e4;
  gfc_ref *ref;
  tree decl, backend_decl, stmt, type, outer_decl;
  locus old_loc = gfc_current_locus;
  const char *iname;
  gfc_try t;

  decl = OMP_CLAUSE_DECL (c);
  gfc_current_locus = where;
  type = TREE_TYPE (decl);
  outer_decl = create_tmp_var_raw (type, NULL);
  if (TREE_CODE (decl) == PARM_DECL
      && TREE_CODE (type) == REFERENCE_TYPE
      && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (type))
      && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (type)) == GFC_ARRAY_ALLOCATABLE)
    {
      decl = build_fold_indirect_ref (decl);
      type = TREE_TYPE (type);
    }

  /* Create a fake symbol for init value.  */
  memset (&init_val_sym, 0, sizeof (init_val_sym));
  init_val_sym.ns = sym->ns;
  init_val_sym.name = sym->name;
  init_val_sym.ts = sym->ts;
  init_val_sym.attr.referenced = 1;
  init_val_sym.declared_at = where;
  init_val_sym.attr.flavor = FL_VARIABLE;
  backend_decl = omp_reduction_init (c, gfc_sym_type (&init_val_sym));
  init_val_sym.backend_decl = backend_decl;

  /* Create a fake symbol for the outer array reference.  */
  outer_sym = *sym;
  outer_sym.as = gfc_copy_array_spec (sym->as);
  outer_sym.attr.dummy = 0;
  outer_sym.attr.result = 0;
  outer_sym.attr.flavor = FL_VARIABLE;
  outer_sym.backend_decl = outer_decl;
  if (decl != OMP_CLAUSE_DECL (c))
    outer_sym.backend_decl = build_fold_indirect_ref (outer_decl);

  /* Create fake symtrees for it.  */
  symtree1 = gfc_new_symtree (&root1, sym->name);
  symtree1->n.sym = sym;
  gcc_assert (symtree1 == root1);

  symtree2 = gfc_new_symtree (&root2, sym->name);
  symtree2->n.sym = &init_val_sym;
  gcc_assert (symtree2 == root2);

  symtree3 = gfc_new_symtree (&root3, sym->name);
  symtree3->n.sym = &outer_sym;
  gcc_assert (symtree3 == root3);

  /* Create expressions.  */
  e1 = gfc_get_expr ();
  e1->expr_type = EXPR_VARIABLE;
  e1->where = where;
  e1->symtree = symtree1;
  e1->ts = sym->ts;
  e1->ref = ref = gfc_get_ref ();
  ref->type = REF_ARRAY;
  ref->u.ar.where = where;
  ref->u.ar.as = sym->as;
  ref->u.ar.type = AR_FULL;
  ref->u.ar.dimen = 0;
  t = gfc_resolve_expr (e1);
  gcc_assert (t == SUCCESS);

  e2 = gfc_get_expr ();
  e2->expr_type = EXPR_VARIABLE;
  e2->where = where;
  e2->symtree = symtree2;
  e2->ts = sym->ts;
  t = gfc_resolve_expr (e2);
  gcc_assert (t == SUCCESS);

  e3 = gfc_copy_expr (e1);
  e3->symtree = symtree3;
  t = gfc_resolve_expr (e3);
  gcc_assert (t == SUCCESS);

  iname = NULL;
  switch (OMP_CLAUSE_REDUCTION_CODE (c))
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
      e4 = gfc_add (e3, e1);
      break;
    case MULT_EXPR:
      e4 = gfc_multiply (e3, e1);
      break;
    case TRUTH_ANDIF_EXPR:
      e4 = gfc_and (e3, e1);
      break;
    case TRUTH_ORIF_EXPR:
      e4 = gfc_or (e3, e1);
      break;
    case EQ_EXPR:
      e4 = gfc_eqv (e3, e1);
      break;
    case NE_EXPR:
      e4 = gfc_neqv (e3, e1);
      break;
    case MIN_EXPR:
      iname = "min";
      break;
    case MAX_EXPR:
      iname = "max";
      break;
    case BIT_AND_EXPR:
      iname = "iand";
      break;
    case BIT_IOR_EXPR:
      iname = "ior";
      break;
    case BIT_XOR_EXPR:
      iname = "ieor";
      break;
    default:
      gcc_unreachable ();
    }
  if (iname != NULL)
    {
      memset (&intrinsic_sym, 0, sizeof (intrinsic_sym));
      intrinsic_sym.ns = sym->ns;
      intrinsic_sym.name = iname;
      intrinsic_sym.ts = sym->ts;
      intrinsic_sym.attr.referenced = 1;
      intrinsic_sym.attr.intrinsic = 1;
      intrinsic_sym.attr.function = 1;
      intrinsic_sym.result = &intrinsic_sym;
      intrinsic_sym.declared_at = where;

      symtree4 = gfc_new_symtree (&root4, iname);
      symtree4->n.sym = &intrinsic_sym;
      gcc_assert (symtree4 == root4);

      e4 = gfc_get_expr ();
      e4->expr_type = EXPR_FUNCTION;
      e4->where = where;
      e4->symtree = symtree4;
      e4->value.function.isym = gfc_find_function (iname);
      e4->value.function.actual = gfc_get_actual_arglist ();
      e4->value.function.actual->expr = e3;
      e4->value.function.actual->next = gfc_get_actual_arglist ();
      e4->value.function.actual->next->expr = e1;
    }
  /* e1 and e3 have been stored as arguments of e4, avoid sharing.  */
  e1 = gfc_copy_expr (e1);
  e3 = gfc_copy_expr (e3);
  t = gfc_resolve_expr (e4);
  gcc_assert (t == SUCCESS);

  /* Create the init statement list.  */
  pushlevel (0);
  if (GFC_DESCRIPTOR_TYPE_P (type)
      && GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ALLOCATABLE)
    {
      /* If decl is an allocatable array, it needs to be allocated
	 with the same bounds as the outer var.  */
      tree rank, size, esize, ptr;
      stmtblock_t block;

      gfc_start_block (&block);

      gfc_add_modify (&block, decl, outer_sym.backend_decl);
      rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
      size = gfc_conv_descriptor_ubound_get (decl, rank);
      size = fold_build2_loc (input_location, MINUS_EXPR,
			      gfc_array_index_type, size,
			      gfc_conv_descriptor_lbound_get (decl, rank));
      size = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
			      size, gfc_index_one_node);
      if (GFC_TYPE_ARRAY_RANK (type) > 1)
	size = fold_build2_loc (input_location, MULT_EXPR,
				gfc_array_index_type, size,
				gfc_conv_descriptor_stride_get (decl, rank));
      esize = fold_convert (gfc_array_index_type,
			    TYPE_SIZE_UNIT (gfc_get_element_type (type)));
      size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
			      size, esize);
      size = gfc_evaluate_now (fold_convert (size_type_node, size), &block);

      ptr = gfc_create_var (pvoid_type_node, NULL);
      gfc_allocate_using_malloc (&block, ptr, size, NULL_TREE);
      gfc_conv_descriptor_data_set (&block, decl, ptr);

      gfc_add_expr_to_block (&block, gfc_trans_assignment (e1, e2, false,
							   false));
      stmt = gfc_finish_block (&block);
    }
  else
    stmt = gfc_trans_assignment (e1, e2, false, false);
  if (TREE_CODE (stmt) != BIND_EXPR)
    stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0, 0));
  else
    poplevel (0, 0, 0);
  OMP_CLAUSE_REDUCTION_INIT (c) = stmt;

  /* Create the merge statement list.  */
  pushlevel (0);
  if (GFC_DESCRIPTOR_TYPE_P (type)
      && GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ALLOCATABLE)
    {
      /* If decl is an allocatable array, it needs to be deallocated
	 afterwards.  */
      stmtblock_t block;

      gfc_start_block (&block);
      gfc_add_expr_to_block (&block, gfc_trans_assignment (e3, e4, false,
							   true));
      gfc_add_expr_to_block (&block, gfc_trans_dealloc_allocated (decl, false));
      stmt = gfc_finish_block (&block);
    }
  else
    stmt = gfc_trans_assignment (e3, e4, false, true);
  if (TREE_CODE (stmt) != BIND_EXPR)
    stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0, 0));
  else
    poplevel (0, 0, 0);
  OMP_CLAUSE_REDUCTION_MERGE (c) = stmt;

  /* And stick the placeholder VAR_DECL into the clause as well.  */
  OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = outer_decl;

  gfc_current_locus = old_loc;

  gfc_free_expr (e1);
  gfc_free_expr (e2);
  gfc_free_expr (e3);
  gfc_free_expr (e4);
  free (symtree1);
  free (symtree2);
  free (symtree3);
  free (symtree4);
  gfc_free_array_spec (outer_sym.as);
}

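/* Added descriptive comment (not in the original source): build
   OMP_CLAUSE_REDUCTION clauses with reduction operator REDUCTION_CODE for
   every referenced symbol in NAMELIST and chain them onto LIST.  */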
static tree
gfc_trans_omp_reduction_list (gfc_namelist *namelist, tree list,
			      enum tree_code reduction_code, locus where)
{
  for (; namelist != NULL; namelist = namelist->next)
    if (namelist->sym->attr.referenced)
      {
	tree t = gfc_trans_omp_variable (namelist->sym);
	if (t != error_mark_node)
	  {
	    tree node = build_omp_clause (where.lb->location,
					  OMP_CLAUSE_REDUCTION);
	    OMP_CLAUSE_DECL (node) = t;
	    OMP_CLAUSE_REDUCTION_CODE (node) = reduction_code;
	    if (namelist->sym->attr.dimension)
	      gfc_trans_omp_array_reduction (node, namelist->sym, where);
	    list = gfc_trans_add_clause (node, list);
	  }
      }
  return list;
}

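/* Added descriptive comment (not in the original source): translate the
   gfortran OpenMP clause representation CLAUSES into a chain of GENERIC
   OMP_CLAUSE_* trees.  Any code needed to evaluate clause expressions is
   appended to BLOCK.  */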
static tree
gfc_trans_omp_clauses (stmtblock_t *block, gfc_omp_clauses *clauses,
		       locus where)
{
  tree omp_clauses = NULL_TREE, chunk_size, c;
  int list;
  enum omp_clause_code clause_code;
  gfc_se se;

  if (clauses == NULL)
    return NULL_TREE;

  for (list = 0; list < OMP_LIST_NUM; list++)
    {
      gfc_namelist *n = clauses->lists[list];

      if (n == NULL)
	continue;
      if (list >= OMP_LIST_REDUCTION_FIRST
	  && list <= OMP_LIST_REDUCTION_LAST)
	{
	  enum tree_code reduction_code;
	  switch (list)
	    {
	    case OMP_LIST_PLUS:
	      reduction_code = PLUS_EXPR;
	      break;
	    case OMP_LIST_MULT:
	      reduction_code = MULT_EXPR;
	      break;
	    case OMP_LIST_SUB:
	      reduction_code = MINUS_EXPR;
	      break;
	    case OMP_LIST_AND:
	      reduction_code = TRUTH_ANDIF_EXPR;
	      break;
	    case OMP_LIST_OR:
	      reduction_code = TRUTH_ORIF_EXPR;
	      break;
	    case OMP_LIST_EQV:
	      reduction_code = EQ_EXPR;
	      break;
	    case OMP_LIST_NEQV:
	      reduction_code = NE_EXPR;
	      break;
	    case OMP_LIST_MAX:
	      reduction_code = MAX_EXPR;
	      break;
	    case OMP_LIST_MIN:
	      reduction_code = MIN_EXPR;
	      break;
	    case OMP_LIST_IAND:
	      reduction_code = BIT_AND_EXPR;
	      break;
	    case OMP_LIST_IOR:
	      reduction_code = BIT_IOR_EXPR;
	      break;
	    case OMP_LIST_IEOR:
	      reduction_code = BIT_XOR_EXPR;
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  omp_clauses
	    = gfc_trans_omp_reduction_list (n, omp_clauses, reduction_code,
					    where);
	  continue;
	}
      switch (list)
	{
	case OMP_LIST_PRIVATE:
	  clause_code = OMP_CLAUSE_PRIVATE;
	  goto add_clause;
	case OMP_LIST_SHARED:
	  clause_code = OMP_CLAUSE_SHARED;
	  goto add_clause;
	case OMP_LIST_FIRSTPRIVATE:
	  clause_code = OMP_CLAUSE_FIRSTPRIVATE;
	  goto add_clause;
	case OMP_LIST_LASTPRIVATE:
	  clause_code = OMP_CLAUSE_LASTPRIVATE;
	  goto add_clause;
	case OMP_LIST_COPYIN:
	  clause_code = OMP_CLAUSE_COPYIN;
	  goto add_clause;
	case OMP_LIST_COPYPRIVATE:
	  clause_code = OMP_CLAUSE_COPYPRIVATE;
	  /* FALLTHROUGH */
	add_clause:
	  omp_clauses
	    = gfc_trans_omp_variable_list (clause_code, n, omp_clauses);
	  break;
	default:
	  break;
	}
    }

  if (clauses->if_expr)
    {
      tree if_var;

      gfc_init_se (&se, NULL);
      gfc_conv_expr (&se, clauses->if_expr);
      gfc_add_block_to_block (block, &se.pre);
      if_var = gfc_evaluate_now (se.expr, block);
      gfc_add_block_to_block (block, &se.post);

      c = build_omp_clause (where.lb->location, OMP_CLAUSE_IF);
      OMP_CLAUSE_IF_EXPR (c) = if_var;
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }

  if (clauses->final_expr)
    {
      tree final_var;

      gfc_init_se (&se, NULL);
      gfc_conv_expr (&se, clauses->final_expr);
      gfc_add_block_to_block (block, &se.pre);
      final_var = gfc_evaluate_now (se.expr, block);
      gfc_add_block_to_block (block, &se.post);

      c = build_omp_clause (where.lb->location, OMP_CLAUSE_FINAL);
      OMP_CLAUSE_FINAL_EXPR (c) = final_var;
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }

  if (clauses->num_threads)
    {
      tree num_threads;

      gfc_init_se (&se, NULL);
      gfc_conv_expr (&se, clauses->num_threads);
      gfc_add_block_to_block (block, &se.pre);
      num_threads = gfc_evaluate_now (se.expr, block);
      gfc_add_block_to_block (block, &se.post);

      c = build_omp_clause (where.lb->location, OMP_CLAUSE_NUM_THREADS);
      OMP_CLAUSE_NUM_THREADS_EXPR (c) = num_threads;
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }

  chunk_size = NULL_TREE;
  if (clauses->chunk_size)
    {
      gfc_init_se (&se, NULL);
      gfc_conv_expr (&se, clauses->chunk_size);
      gfc_add_block_to_block (block, &se.pre);
      chunk_size = gfc_evaluate_now (se.expr, block);
      gfc_add_block_to_block (block, &se.post);
    }

  if (clauses->sched_kind != OMP_SCHED_NONE)
    {
      c = build_omp_clause (where.lb->location, OMP_CLAUSE_SCHEDULE);
      OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (c) = chunk_size;
      switch (clauses->sched_kind)
	{
	case OMP_SCHED_STATIC:
	  OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_STATIC;
	  break;
	case OMP_SCHED_DYNAMIC:
	  OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_DYNAMIC;
	  break;
	case OMP_SCHED_GUIDED:
	  OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_GUIDED;
	  break;
	case OMP_SCHED_RUNTIME:
	  OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_RUNTIME;
	  break;
	case OMP_SCHED_AUTO:
	  OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_AUTO;
	  break;
	default:
	  gcc_unreachable ();
	}
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }

  if (clauses->default_sharing != OMP_DEFAULT_UNKNOWN)
    {
      c = build_omp_clause (where.lb->location, OMP_CLAUSE_DEFAULT);
      switch (clauses->default_sharing)
	{
	case OMP_DEFAULT_NONE:
	  OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_NONE;
	  break;
	case OMP_DEFAULT_SHARED:
	  OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_SHARED;
	  break;
	case OMP_DEFAULT_PRIVATE:
	  OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_PRIVATE;
	  break;
	case OMP_DEFAULT_FIRSTPRIVATE:
	  OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
	  break;
	default:
	  gcc_unreachable ();
	}
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }

  if (clauses->nowait)
    {
      c = build_omp_clause (where.lb->location, OMP_CLAUSE_NOWAIT);
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }

  if (clauses->ordered)
    {
      c = build_omp_clause (where.lb->location, OMP_CLAUSE_ORDERED);
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }

  if (clauses->untied)
    {
      c = build_omp_clause (where.lb->location, OMP_CLAUSE_UNTIED);
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }

  if (clauses->mergeable)
    {
      c = build_omp_clause (where.lb->location, OMP_CLAUSE_MERGEABLE);
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }

  if (clauses->collapse)
    {
      c = build_omp_clause (where.lb->location, OMP_CLAUSE_COLLAPSE);
      OMP_CLAUSE_COLLAPSE_EXPR (c)
	= build_int_cst (integer_type_node, clauses->collapse);
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }

  return omp_clauses;
}

/* Like gfc_trans_code, but force creation of a BIND_EXPR around it.  */

static tree
gfc_trans_omp_code (gfc_code *code, bool force_empty)
{
  tree stmt;

  pushlevel (0);
  stmt = gfc_trans_code (code);
  if (TREE_CODE (stmt) != BIND_EXPR)
    {
      if (!IS_EMPTY_STMT (stmt) || force_empty)
	{
	  tree block = poplevel (1, 0, 0);
	  stmt = build3_v (BIND_EXPR, NULL, stmt, block);
	}
      else
	poplevel (0, 0, 0);
    }
  else
    poplevel (0, 0, 0);
  return stmt;
}


static tree gfc_trans_omp_sections (gfc_code *, gfc_omp_clauses *);
static tree gfc_trans_omp_workshare (gfc_code *, gfc_omp_clauses *);

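/* Added descriptive comment (not in the original source): translate
   !$OMP ATOMIC in its READ, WRITE, UPDATE and CAPTURE forms.  The statement
   following the directive must be an assignment whose right hand side refers
   to the atomic variable through one of the operators or intrinsics allowed
   by the standard.  */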
static tree
gfc_trans_omp_atomic (gfc_code *code)
{
  gfc_code *atomic_code = code;
  gfc_se lse;
  gfc_se rse;
  gfc_se vse;
  gfc_expr *expr2, *e;
  gfc_symbol *var;
  stmtblock_t block;
  tree lhsaddr, type, rhs, x;
  enum tree_code op = ERROR_MARK;
  enum tree_code aop = OMP_ATOMIC;
  bool var_on_left = false;

  code = code->block->next;
  gcc_assert (code->op == EXEC_ASSIGN);
  var = code->expr1->symtree->n.sym;

  gfc_init_se (&lse, NULL);
  gfc_init_se (&rse, NULL);
  gfc_init_se (&vse, NULL);
  gfc_start_block (&block);

  expr2 = code->expr2;
  if (expr2->expr_type == EXPR_FUNCTION
      && expr2->value.function.isym->id == GFC_ISYM_CONVERSION)
    expr2 = expr2->value.function.actual->expr;

  switch (atomic_code->ext.omp_atomic)
    {
    case GFC_OMP_ATOMIC_READ:
      gfc_conv_expr (&vse, code->expr1);
      gfc_add_block_to_block (&block, &vse.pre);

      gfc_conv_expr (&lse, expr2);
      gfc_add_block_to_block (&block, &lse.pre);
      type = TREE_TYPE (lse.expr);
      lhsaddr = gfc_build_addr_expr (NULL, lse.expr);

      x = build1 (OMP_ATOMIC_READ, type, lhsaddr);
      x = convert (TREE_TYPE (vse.expr), x);
      gfc_add_modify (&block, vse.expr, x);

      gfc_add_block_to_block (&block, &lse.pre);
      gfc_add_block_to_block (&block, &rse.pre);

      return gfc_finish_block (&block);
    case GFC_OMP_ATOMIC_CAPTURE:
      aop = OMP_ATOMIC_CAPTURE_NEW;
      if (expr2->expr_type == EXPR_VARIABLE)
	{
	  aop = OMP_ATOMIC_CAPTURE_OLD;
	  gfc_conv_expr (&vse, code->expr1);
	  gfc_add_block_to_block (&block, &vse.pre);

	  gfc_conv_expr (&lse, expr2);
	  gfc_add_block_to_block (&block, &lse.pre);
	  gfc_init_se (&lse, NULL);
	  code = code->next;
	  var = code->expr1->symtree->n.sym;
	  expr2 = code->expr2;
	  if (expr2->expr_type == EXPR_FUNCTION
	      && expr2->value.function.isym->id == GFC_ISYM_CONVERSION)
	    expr2 = expr2->value.function.actual->expr;
	}
      break;
    default:
      break;
    }

  gfc_conv_expr (&lse, code->expr1);
  gfc_add_block_to_block (&block, &lse.pre);
  type = TREE_TYPE (lse.expr);
  lhsaddr = gfc_build_addr_expr (NULL, lse.expr);

  if (atomic_code->ext.omp_atomic == GFC_OMP_ATOMIC_WRITE)
    {
      gfc_conv_expr (&rse, expr2);
      gfc_add_block_to_block (&block, &rse.pre);
    }
  else if (expr2->expr_type == EXPR_OP)
    {
      gfc_expr *e;
      switch (expr2->value.op.op)
	{
	case INTRINSIC_PLUS:
	  op = PLUS_EXPR;
	  break;
	case INTRINSIC_TIMES:
	  op = MULT_EXPR;
	  break;
	case INTRINSIC_MINUS:
	  op = MINUS_EXPR;
	  break;
	case INTRINSIC_DIVIDE:
	  if (expr2->ts.type == BT_INTEGER)
	    op = TRUNC_DIV_EXPR;
	  else
	    op = RDIV_EXPR;
	  break;
	case INTRINSIC_AND:
	  op = TRUTH_ANDIF_EXPR;
	  break;
	case INTRINSIC_OR:
	  op = TRUTH_ORIF_EXPR;
	  break;
	case INTRINSIC_EQV:
	  op = EQ_EXPR;
	  break;
	case INTRINSIC_NEQV:
	  op = NE_EXPR;
	  break;
	default:
	  gcc_unreachable ();
	}
      e = expr2->value.op.op1;
      if (e->expr_type == EXPR_FUNCTION
	  && e->value.function.isym->id == GFC_ISYM_CONVERSION)
	e = e->value.function.actual->expr;
      if (e->expr_type == EXPR_VARIABLE
	  && e->symtree != NULL
	  && e->symtree->n.sym == var)
	{
	  expr2 = expr2->value.op.op2;
	  var_on_left = true;
	}
      else
	{
	  e = expr2->value.op.op2;
	  if (e->expr_type == EXPR_FUNCTION
	      && e->value.function.isym->id == GFC_ISYM_CONVERSION)
	    e = e->value.function.actual->expr;
	  gcc_assert (e->expr_type == EXPR_VARIABLE
		      && e->symtree != NULL
		      && e->symtree->n.sym == var);
	  expr2 = expr2->value.op.op1;
	  var_on_left = false;
	}
      gfc_conv_expr (&rse, expr2);
      gfc_add_block_to_block (&block, &rse.pre);
    }
  else
    {
      gcc_assert (expr2->expr_type == EXPR_FUNCTION);
      switch (expr2->value.function.isym->id)
	{
	case GFC_ISYM_MIN:
	  op = MIN_EXPR;
	  break;
	case GFC_ISYM_MAX:
	  op = MAX_EXPR;
	  break;
	case GFC_ISYM_IAND:
	  op = BIT_AND_EXPR;
	  break;
	case GFC_ISYM_IOR:
	  op = BIT_IOR_EXPR;
	  break;
	case GFC_ISYM_IEOR:
	  op = BIT_XOR_EXPR;
	  break;
	default:
	  gcc_unreachable ();
	}
      e = expr2->value.function.actual->expr;
      gcc_assert (e->expr_type == EXPR_VARIABLE
		  && e->symtree != NULL
		  && e->symtree->n.sym == var);

      gfc_conv_expr (&rse, expr2->value.function.actual->next->expr);
      gfc_add_block_to_block (&block, &rse.pre);
      if (expr2->value.function.actual->next->next != NULL)
	{
	  tree accum = gfc_create_var (TREE_TYPE (rse.expr), NULL);
	  gfc_actual_arglist *arg;

	  gfc_add_modify (&block, accum, rse.expr);
	  for (arg = expr2->value.function.actual->next->next; arg;
	       arg = arg->next)
	    {
	      gfc_init_block (&rse.pre);
	      gfc_conv_expr (&rse, arg->expr);
	      gfc_add_block_to_block (&block, &rse.pre);
	      x = fold_build2_loc (input_location, op, TREE_TYPE (accum),
				   accum, rse.expr);
	      gfc_add_modify (&block, accum, x);
	    }

	  rse.expr = accum;
	}

      expr2 = expr2->value.function.actual->next->expr;
    }

  lhsaddr = save_expr (lhsaddr);
  rhs = gfc_evaluate_now (rse.expr, &block);

  if (atomic_code->ext.omp_atomic == GFC_OMP_ATOMIC_WRITE)
    x = rhs;
  else
    {
      x = convert (TREE_TYPE (rhs),
		   build_fold_indirect_ref_loc (input_location, lhsaddr));
      if (var_on_left)
	x = fold_build2_loc (input_location, op, TREE_TYPE (rhs), x, rhs);
      else
	x = fold_build2_loc (input_location, op, TREE_TYPE (rhs), rhs, x);
    }

  if (TREE_CODE (TREE_TYPE (rhs)) == COMPLEX_TYPE
      && TREE_CODE (type) != COMPLEX_TYPE)
    x = fold_build1_loc (input_location, REALPART_EXPR,
			 TREE_TYPE (TREE_TYPE (rhs)), x);

  gfc_add_block_to_block (&block, &lse.pre);
  gfc_add_block_to_block (&block, &rse.pre);

  if (aop == OMP_ATOMIC)
    {
      x = build2_v (OMP_ATOMIC, lhsaddr, convert (type, x));
      gfc_add_expr_to_block (&block, x);
    }
  else
    {
      if (aop == OMP_ATOMIC_CAPTURE_NEW)
	{
	  code = code->next;
	  expr2 = code->expr2;
	  if (expr2->expr_type == EXPR_FUNCTION
	      && expr2->value.function.isym->id == GFC_ISYM_CONVERSION)
	    expr2 = expr2->value.function.actual->expr;

	  gcc_assert (expr2->expr_type == EXPR_VARIABLE);
	  gfc_conv_expr (&vse, code->expr1);
	  gfc_add_block_to_block (&block, &vse.pre);

	  gfc_init_se (&lse, NULL);
	  gfc_conv_expr (&lse, expr2);
	  gfc_add_block_to_block (&block, &lse.pre);
	}
      x = build2 (aop, type, lhsaddr, convert (type, x));
      x = convert (TREE_TYPE (vse.expr), x);
      gfc_add_modify (&block, vse.expr, x);
    }

  return gfc_finish_block (&block);
}

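/* Added descriptive comment (not in the original source): translate
   !$OMP BARRIER into a call to the GOMP barrier builtin.  */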
static tree
gfc_trans_omp_barrier (void)
{
  tree decl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER);
  return build_call_expr_loc (input_location, decl, 0);
}

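/* Added descriptive comment (not in the original source): translate
   !$OMP CRITICAL [(name)] into an OMP_CRITICAL node wrapping the block.  */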
static tree
gfc_trans_omp_critical (gfc_code *code)
{
  tree name = NULL_TREE, stmt;
  if (code->ext.omp_name != NULL)
    name = get_identifier (code->ext.omp_name);
  stmt = gfc_trans_code (code->block->next);
  return build2_loc (input_location, OMP_CRITICAL, void_type_node, stmt, name);
}

typedef struct dovar_init_d {
  tree var;
  tree init;
} dovar_init;

DEF_VEC_O(dovar_init);
DEF_VEC_ALLOC_O(dovar_init,heap);

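/* Added descriptive comment (not in the original source): translate
   !$OMP DO (possibly collapsed over several nested loops) into an OMP_FOR
   node.  Loops with step 1 or -1 use the DO variable directly; other steps
   introduce a hidden iteration count variable and initialize the DO variable
   from it at the top of the body.  */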
static tree
|
1300 |
|
|
gfc_trans_omp_do (gfc_code *code, stmtblock_t *pblock,
|
1301 |
|
|
gfc_omp_clauses *do_clauses, tree par_clauses)
|
1302 |
|
|
{
|
1303 |
|
|
gfc_se se;
|
1304 |
|
|
tree dovar, stmt, from, to, step, type, init, cond, incr;
|
1305 |
|
|
tree count = NULL_TREE, cycle_label, tmp, omp_clauses;
|
1306 |
|
|
stmtblock_t block;
|
1307 |
|
|
stmtblock_t body;
|
1308 |
|
|
gfc_omp_clauses *clauses = code->ext.omp_clauses;
|
1309 |
|
|
int i, collapse = clauses->collapse;
|
1310 |
|
|
VEC(dovar_init,heap) *inits = NULL;
|
1311 |
|
|
dovar_init *di;
|
1312 |
|
|
unsigned ix;
|
1313 |
|
|
|
1314 |
|
|
if (collapse <= 0)
|
1315 |
|
|
collapse = 1;
|
1316 |
|
|
|
1317 |
|
|
code = code->block->next;
|
1318 |
|
|
gcc_assert (code->op == EXEC_DO);
|
1319 |
|
|
|
1320 |
|
|
init = make_tree_vec (collapse);
|
1321 |
|
|
cond = make_tree_vec (collapse);
|
1322 |
|
|
incr = make_tree_vec (collapse);
|
1323 |
|
|
|
1324 |
|
|
if (pblock == NULL)
|
1325 |
|
|
{
|
1326 |
|
|
gfc_start_block (&block);
|
1327 |
|
|
pblock = █
|
1328 |
|
|
}
|
1329 |
|
|
|
1330 |
|
|
omp_clauses = gfc_trans_omp_clauses (pblock, do_clauses, code->loc);
|
1331 |
|
|
|
1332 |
|
|
for (i = 0; i < collapse; i++)
|
1333 |
|
|
{
|
1334 |
|
|
int simple = 0;
|
1335 |
|
|
int dovar_found = 0;
|
1336 |
|
|
tree dovar_decl;
|
1337 |
|
|
|
1338 |
|
|
if (clauses)
|
1339 |
|
|
{
|
1340 |
|
|
gfc_namelist *n;
|
1341 |
|
|
for (n = clauses->lists[OMP_LIST_LASTPRIVATE]; n != NULL;
|
1342 |
|
|
n = n->next)
|
1343 |
|
|
if (code->ext.iterator->var->symtree->n.sym == n->sym)
|
1344 |
|
|
break;
|
1345 |
|
|
if (n != NULL)
|
1346 |
|
|
dovar_found = 1;
|
1347 |
|
|
else if (n == NULL)
|
1348 |
|
|
for (n = clauses->lists[OMP_LIST_PRIVATE]; n != NULL; n = n->next)
|
1349 |
|
|
if (code->ext.iterator->var->symtree->n.sym == n->sym)
|
1350 |
|
|
break;
|
1351 |
|
|
if (n != NULL)
|
1352 |
|
|
dovar_found++;
|
1353 |
|
|
}
|
1354 |
|
|
|
1355 |
|
|
/* Evaluate all the expressions in the iterator. */
|
1356 |
|
|
gfc_init_se (&se, NULL);
|
1357 |
|
|
gfc_conv_expr_lhs (&se, code->ext.iterator->var);
|
1358 |
|
|
gfc_add_block_to_block (pblock, &se.pre);
|
1359 |
|
|
dovar = se.expr;
|
1360 |
|
|
type = TREE_TYPE (dovar);
|
1361 |
|
|
gcc_assert (TREE_CODE (type) == INTEGER_TYPE);
|
1362 |
|
|
|
1363 |
|
|
gfc_init_se (&se, NULL);
|
1364 |
|
|
gfc_conv_expr_val (&se, code->ext.iterator->start);
|
1365 |
|
|
gfc_add_block_to_block (pblock, &se.pre);
|
1366 |
|
|
from = gfc_evaluate_now (se.expr, pblock);
|
1367 |
|
|
|
1368 |
|
|
gfc_init_se (&se, NULL);
|
1369 |
|
|
gfc_conv_expr_val (&se, code->ext.iterator->end);
|
1370 |
|
|
gfc_add_block_to_block (pblock, &se.pre);
|
1371 |
|
|
to = gfc_evaluate_now (se.expr, pblock);
|
1372 |
|
|
|
1373 |
|
|
gfc_init_se (&se, NULL);
|
1374 |
|
|
gfc_conv_expr_val (&se, code->ext.iterator->step);
|
1375 |
|
|
gfc_add_block_to_block (pblock, &se.pre);
|
1376 |
|
|
step = gfc_evaluate_now (se.expr, pblock);
      dovar_decl = dovar;

      /* Special case simple loops.  */
      if (TREE_CODE (dovar) == VAR_DECL)
        {
          if (integer_onep (step))
            simple = 1;
          else if (tree_int_cst_equal (step, integer_minus_one_node))
            simple = -1;
        }
      else
        dovar_decl
          = gfc_trans_omp_variable (code->ext.iterator->var->symtree->n.sym);

      /* Loop body.  */
      if (simple)
        {
          TREE_VEC_ELT (init, i) = build2_v (MODIFY_EXPR, dovar, from);
          /* The condition should not be folded.  */
          TREE_VEC_ELT (cond, i) = build2_loc (input_location, simple > 0
                                               ? LE_EXPR : GE_EXPR,
                                               boolean_type_node, dovar, to);
          TREE_VEC_ELT (incr, i) = fold_build2_loc (input_location, PLUS_EXPR,
                                                    type, dovar, step);
          TREE_VEC_ELT (incr, i) = fold_build2_loc (input_location,
                                                    MODIFY_EXPR,
                                                    type, dovar,
                                                    TREE_VEC_ELT (incr, i));
        }
      else
        {
          /* STEP is not 1 or -1.  Use:
             for (count = 0; count < (to + step - from) / step; count++)
               {
                 dovar = from + count * step;
                 body;
               cycle_label:;
               }  */
          tmp = fold_build2_loc (input_location, MINUS_EXPR, type, step, from);
          tmp = fold_build2_loc (input_location, PLUS_EXPR, type, to, tmp);
          tmp = fold_build2_loc (input_location, TRUNC_DIV_EXPR, type, tmp,
                                 step);
          tmp = gfc_evaluate_now (tmp, pblock);
          count = gfc_create_var (type, "count");
          TREE_VEC_ELT (init, i) = build2_v (MODIFY_EXPR, count,
                                             build_int_cst (type, 0));
          /* The condition should not be folded.  */
          TREE_VEC_ELT (cond, i) = build2_loc (input_location, LT_EXPR,
                                               boolean_type_node,
                                               count, tmp);
          TREE_VEC_ELT (incr, i) = fold_build2_loc (input_location, PLUS_EXPR,
                                                    type, count,
                                                    build_int_cst (type, 1));
          TREE_VEC_ELT (incr, i) = fold_build2_loc (input_location,
                                                    MODIFY_EXPR, type, count,
                                                    TREE_VEC_ELT (incr, i));

          /* Initialize DOVAR.  */
          tmp = fold_build2_loc (input_location, MULT_EXPR, type, count, step);
          tmp = fold_build2_loc (input_location, PLUS_EXPR, type, from, tmp);
          di = VEC_safe_push (dovar_init, heap, inits, NULL);
          di->var = dovar;
          di->init = tmp;
        }

      if (!dovar_found)
        {
          tmp = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
          OMP_CLAUSE_DECL (tmp) = dovar_decl;
          omp_clauses = gfc_trans_add_clause (tmp, omp_clauses);
        }
      else if (dovar_found == 2)
        {
          tree c = NULL;

          tmp = NULL;
          if (!simple)
            {
              /* If dovar is lastprivate, but different counter is used,
                 dovar += step needs to be added to
                 OMP_CLAUSE_LASTPRIVATE_STMT, otherwise the copied dovar
                 will have the value on entry of the last loop, rather
                 than value after iterator increment.  */
              tmp = gfc_evaluate_now (step, pblock);
              tmp = fold_build2_loc (input_location, PLUS_EXPR, type, dovar,
                                     tmp);
              tmp = fold_build2_loc (input_location, MODIFY_EXPR, type,
                                     dovar, tmp);
              for (c = omp_clauses; c ; c = OMP_CLAUSE_CHAIN (c))
                if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
                    && OMP_CLAUSE_DECL (c) == dovar_decl)
                  {
                    OMP_CLAUSE_LASTPRIVATE_STMT (c) = tmp;
                    break;
                  }
            }
          if (c == NULL && par_clauses != NULL)
            {
              for (c = par_clauses; c ; c = OMP_CLAUSE_CHAIN (c))
                if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
                    && OMP_CLAUSE_DECL (c) == dovar_decl)
                  {
                    tree l = build_omp_clause (input_location,
                                               OMP_CLAUSE_LASTPRIVATE);
                    OMP_CLAUSE_DECL (l) = dovar_decl;
                    OMP_CLAUSE_CHAIN (l) = omp_clauses;
                    OMP_CLAUSE_LASTPRIVATE_STMT (l) = tmp;
                    omp_clauses = l;
                    OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_SHARED);
                    break;
                  }
            }
          gcc_assert (simple || c != NULL);
        }
      if (!simple)
        {
          tmp = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
          OMP_CLAUSE_DECL (tmp) = count;
          omp_clauses = gfc_trans_add_clause (tmp, omp_clauses);
        }

      if (i + 1 < collapse)
        code = code->block->next;
    }

  if (pblock != &block)
    {
      pushlevel (0);
      gfc_start_block (&block);
    }

  gfc_start_block (&body);

  FOR_EACH_VEC_ELT (dovar_init, inits, ix, di)
    gfc_add_modify (&body, di->var, di->init);
  VEC_free (dovar_init, heap, inits);

  /* Cycle statement is implemented with a goto.  Exit statement must not be
     present for this loop.  */
  cycle_label = gfc_build_label_decl (NULL_TREE);

  /* Put these labels where they can be found later.  */

  code->cycle_label = cycle_label;
  code->exit_label = NULL_TREE;

  /* Main loop body.  */
  tmp = gfc_trans_omp_code (code->block->next, true);
  gfc_add_expr_to_block (&body, tmp);

  /* Label for cycle statements (if needed).  */
  if (TREE_USED (cycle_label))
    {
      tmp = build1_v (LABEL_EXPR, cycle_label);
      gfc_add_expr_to_block (&body, tmp);
    }

  /* End of loop body.  */
  stmt = make_node (OMP_FOR);

  TREE_TYPE (stmt) = void_type_node;
  OMP_FOR_BODY (stmt) = gfc_finish_block (&body);
  OMP_FOR_CLAUSES (stmt) = omp_clauses;
  OMP_FOR_INIT (stmt) = init;
  OMP_FOR_COND (stmt) = cond;
  OMP_FOR_INCR (stmt) = incr;
  gfc_add_expr_to_block (&block, stmt);

  return gfc_finish_block (&block);
}

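/* Translate !$OMP FLUSH as a full memory barrier via
   BUILT_IN_SYNC_SYNCHRONIZE.  */
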
static tree
gfc_trans_omp_flush (void)
{
  tree decl = builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE);
  return build_call_expr_loc (input_location, decl, 0);
}

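/* Translate !$OMP MASTER; an empty body is returned as is, without
   wrapping it in OMP_MASTER.  */
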
static tree
gfc_trans_omp_master (gfc_code *code)
{
  tree stmt = gfc_trans_code (code->block->next);
  if (IS_EMPTY_STMT (stmt))
    return stmt;
  return build1_v (OMP_MASTER, stmt);
}

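/* Translate !$OMP ORDERED by wrapping the translated body in an
   OMP_ORDERED node.  */
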
static tree
gfc_trans_omp_ordered (gfc_code *code)
{
  return build1_v (OMP_ORDERED, gfc_trans_code (code->block->next));
}

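/* Translate !$OMP PARALLEL: translate the clauses and the body and wrap
   them in an OMP_PARALLEL node.  */
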
static tree
gfc_trans_omp_parallel (gfc_code *code)
{
  stmtblock_t block;
  tree stmt, omp_clauses;

  gfc_start_block (&block);
  omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
                                       code->loc);
  stmt = gfc_trans_omp_code (code->block->next, true);
  stmt = build2_loc (input_location, OMP_PARALLEL, void_type_node, stmt,
                     omp_clauses);
  gfc_add_expr_to_block (&block, stmt);
  return gfc_finish_block (&block);
}

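/* Translate the combined !$OMP PARALLEL DO construct: split the clause
   list between the parallel region and the loop (SCHEDULE, ORDERED and
   COLLAPSE stay on the loop), then build a combined OMP_PARALLEL around
   the OMP_FOR.  */
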
static tree
gfc_trans_omp_parallel_do (gfc_code *code)
{
  stmtblock_t block, *pblock = NULL;
  gfc_omp_clauses parallel_clauses, do_clauses;
  tree stmt, omp_clauses = NULL_TREE;

  gfc_start_block (&block);

  memset (&do_clauses, 0, sizeof (do_clauses));
  if (code->ext.omp_clauses != NULL)
    {
      memcpy (&parallel_clauses, code->ext.omp_clauses,
              sizeof (parallel_clauses));
      do_clauses.sched_kind = parallel_clauses.sched_kind;
      do_clauses.chunk_size = parallel_clauses.chunk_size;
      do_clauses.ordered = parallel_clauses.ordered;
      do_clauses.collapse = parallel_clauses.collapse;
      parallel_clauses.sched_kind = OMP_SCHED_NONE;
      parallel_clauses.chunk_size = NULL;
      parallel_clauses.ordered = false;
      parallel_clauses.collapse = 0;
      omp_clauses = gfc_trans_omp_clauses (&block, &parallel_clauses,
                                           code->loc);
    }
  do_clauses.nowait = true;
  if (!do_clauses.ordered && do_clauses.sched_kind != OMP_SCHED_STATIC)
    pblock = &block;
  else
    pushlevel (0);
  stmt = gfc_trans_omp_do (code, pblock, &do_clauses, omp_clauses);
  if (TREE_CODE (stmt) != BIND_EXPR)
    stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0, 0));
  else
    poplevel (0, 0, 0);
  stmt = build2_loc (input_location, OMP_PARALLEL, void_type_node, stmt,
                     omp_clauses);
  OMP_PARALLEL_COMBINED (stmt) = 1;
  gfc_add_expr_to_block (&block, stmt);
  return gfc_finish_block (&block);
}

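/* Translate the combined !$OMP PARALLEL SECTIONS construct: all user
   clauses go on the parallel region, the inner OMP_SECTIONS only gets an
   implicit NOWAIT.  */
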
static tree
gfc_trans_omp_parallel_sections (gfc_code *code)
{
  stmtblock_t block;
  gfc_omp_clauses section_clauses;
  tree stmt, omp_clauses;

  memset (&section_clauses, 0, sizeof (section_clauses));
  section_clauses.nowait = true;

  gfc_start_block (&block);
  omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
                                       code->loc);
  pushlevel (0);
  stmt = gfc_trans_omp_sections (code, &section_clauses);
  if (TREE_CODE (stmt) != BIND_EXPR)
    stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0, 0));
  else
    poplevel (0, 0, 0);
  stmt = build2_loc (input_location, OMP_PARALLEL, void_type_node, stmt,
                     omp_clauses);
  OMP_PARALLEL_COMBINED (stmt) = 1;
  gfc_add_expr_to_block (&block, stmt);
  return gfc_finish_block (&block);
}

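/* Translate the combined !$OMP PARALLEL WORKSHARE construct, analogously
   to PARALLEL SECTIONS above.  */
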
static tree
gfc_trans_omp_parallel_workshare (gfc_code *code)
{
  stmtblock_t block;
  gfc_omp_clauses workshare_clauses;
  tree stmt, omp_clauses;

  memset (&workshare_clauses, 0, sizeof (workshare_clauses));
  workshare_clauses.nowait = true;

  gfc_start_block (&block);
  omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
                                       code->loc);
  pushlevel (0);
  stmt = gfc_trans_omp_workshare (code, &workshare_clauses);
  if (TREE_CODE (stmt) != BIND_EXPR)
    stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0, 0));
  else
    poplevel (0, 0, 0);
  stmt = build2_loc (input_location, OMP_PARALLEL, void_type_node, stmt,
                     omp_clauses);
  OMP_PARALLEL_COMBINED (stmt) = 1;
  gfc_add_expr_to_block (&block, stmt);
  return gfc_finish_block (&block);
}

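/* Translate !$OMP SECTIONS: wrap each SECTION body in an OMP_SECTION and
   chain them inside an OMP_SECTIONS node.  */
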
static tree
gfc_trans_omp_sections (gfc_code *code, gfc_omp_clauses *clauses)
{
  stmtblock_t block, body;
  tree omp_clauses, stmt;
  bool has_lastprivate = clauses->lists[OMP_LIST_LASTPRIVATE] != NULL;

  gfc_start_block (&block);

  omp_clauses = gfc_trans_omp_clauses (&block, clauses, code->loc);

  gfc_init_block (&body);
  for (code = code->block; code; code = code->block)
    {
      /* Last section is special because of lastprivate, so even if it
         is empty, chain it in.  */
      stmt = gfc_trans_omp_code (code->next,
                                 has_lastprivate && code->block == NULL);
      if (! IS_EMPTY_STMT (stmt))
        {
          stmt = build1_v (OMP_SECTION, stmt);
          gfc_add_expr_to_block (&body, stmt);
        }
    }
  stmt = gfc_finish_block (&body);

  stmt = build2_loc (input_location, OMP_SECTIONS, void_type_node, stmt,
                     omp_clauses);
  gfc_add_expr_to_block (&block, stmt);

  return gfc_finish_block (&block);
}

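/* Translate !$OMP SINGLE.  */
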
static tree
gfc_trans_omp_single (gfc_code *code, gfc_omp_clauses *clauses)
{
  tree omp_clauses = gfc_trans_omp_clauses (NULL, clauses, code->loc);
  tree stmt = gfc_trans_omp_code (code->block->next, true);
  stmt = build2_loc (input_location, OMP_SINGLE, void_type_node, stmt,
                     omp_clauses);
  return stmt;
}

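/* Translate !$OMP TASK.  */
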
static tree
gfc_trans_omp_task (gfc_code *code)
{
  stmtblock_t block;
  tree stmt, omp_clauses;

  gfc_start_block (&block);
  omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
                                       code->loc);
  stmt = gfc_trans_omp_code (code->block->next, true);
  stmt = build2_loc (input_location, OMP_TASK, void_type_node, stmt,
                     omp_clauses);
  gfc_add_expr_to_block (&block, stmt);
  return gfc_finish_block (&block);
}

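/* Translate !$OMP TASKWAIT into a call to BUILT_IN_GOMP_TASKWAIT.  */
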
static tree
gfc_trans_omp_taskwait (void)
{
  tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TASKWAIT);
  return build_call_expr_loc (input_location, decl, 0);
}

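/* Translate !$OMP TASKYIELD into a call to BUILT_IN_GOMP_TASKYIELD.  */
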
static tree
gfc_trans_omp_taskyield (void)
{
  tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TASKYIELD);
  return build_call_expr_loc (input_location, decl, 0);
}

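/* Translate the !$OMP WORKSHARE construct.  Units of work that cannot be
   workshared are wrapped in OMP_SINGLE blocks; see the comment before the
   main loop below.  */
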
static tree
gfc_trans_omp_workshare (gfc_code *code, gfc_omp_clauses *clauses)
{
  tree res, tmp, stmt;
  stmtblock_t block, *pblock = NULL;
  stmtblock_t singleblock;
  int saved_ompws_flags;
  bool singleblock_in_progress = false;
  /* True if previous gfc_code in workshare construct is not workshared.  */
  bool prev_singleunit;

  code = code->block->next;

  pushlevel (0);

  gfc_start_block (&block);
  pblock = &block;

  ompws_flags = OMPWS_WORKSHARE_FLAG;
  prev_singleunit = false;

  /* Translate statements one by one to trees until we reach
     the end of the workshare construct.  Adjacent gfc_codes that
     are a single unit of work are clustered and encapsulated in a
     single OMP_SINGLE construct.  */
  for (; code; code = code->next)
    {
      if (code->here != 0)
        {
          res = gfc_trans_label_here (code);
          gfc_add_expr_to_block (pblock, res);
        }

      /* No dependence analysis, use for clauses with wait.
         If this is the last gfc_code, use default omp_clauses.  */
      if (code->next == NULL && clauses->nowait)
        ompws_flags |= OMPWS_NOWAIT;

      /* By default, every gfc_code is a single unit of work.  */
      ompws_flags |= OMPWS_CURR_SINGLEUNIT;
      ompws_flags &= ~OMPWS_SCALARIZER_WS;

      switch (code->op)
        {
        case EXEC_NOP:
          res = NULL_TREE;
          break;

        case EXEC_ASSIGN:
          res = gfc_trans_assign (code);
          break;

        case EXEC_POINTER_ASSIGN:
          res = gfc_trans_pointer_assign (code);
          break;

        case EXEC_INIT_ASSIGN:
          res = gfc_trans_init_assign (code);
          break;

        case EXEC_FORALL:
          res = gfc_trans_forall (code);
          break;

        case EXEC_WHERE:
          res = gfc_trans_where (code);
          break;

        case EXEC_OMP_ATOMIC:
          res = gfc_trans_omp_directive (code);
          break;

        case EXEC_OMP_PARALLEL:
        case EXEC_OMP_PARALLEL_DO:
        case EXEC_OMP_PARALLEL_SECTIONS:
        case EXEC_OMP_PARALLEL_WORKSHARE:
        case EXEC_OMP_CRITICAL:
          saved_ompws_flags = ompws_flags;
          ompws_flags = 0;
          res = gfc_trans_omp_directive (code);
          ompws_flags = saved_ompws_flags;
          break;

        default:
          internal_error ("gfc_trans_omp_workshare(): Bad statement code");
        }

      gfc_set_backend_locus (&code->loc);

      if (res != NULL_TREE && ! IS_EMPTY_STMT (res))
        {
          if (prev_singleunit)
            {
              if (ompws_flags & OMPWS_CURR_SINGLEUNIT)
                /* Add current gfc_code to single block.  */
                gfc_add_expr_to_block (&singleblock, res);
              else
                {
                  /* Finish single block and add it to pblock.  */
                  tmp = gfc_finish_block (&singleblock);
                  tmp = build2_loc (input_location, OMP_SINGLE,
                                    void_type_node, tmp, NULL_TREE);
                  gfc_add_expr_to_block (pblock, tmp);
                  /* Add current gfc_code to pblock.  */
                  gfc_add_expr_to_block (pblock, res);
                  singleblock_in_progress = false;
                }
            }
          else
            {
              if (ompws_flags & OMPWS_CURR_SINGLEUNIT)
                {
                  /* Start single block.  */
                  gfc_init_block (&singleblock);
                  gfc_add_expr_to_block (&singleblock, res);
                  singleblock_in_progress = true;
                }
              else
                /* Add the new statement to the block.  */
                gfc_add_expr_to_block (pblock, res);
            }
          prev_singleunit = (ompws_flags & OMPWS_CURR_SINGLEUNIT) != 0;
        }
    }

  /* Finish remaining SINGLE block, if we were in the middle of one.  */
  if (singleblock_in_progress)
    {
      /* Finish single block and add it to pblock.  */
      tmp = gfc_finish_block (&singleblock);
      tmp = build2_loc (input_location, OMP_SINGLE, void_type_node, tmp,
                        clauses->nowait
                        ? build_omp_clause (input_location, OMP_CLAUSE_NOWAIT)
                        : NULL_TREE);
      gfc_add_expr_to_block (pblock, tmp);
    }

  stmt = gfc_finish_block (pblock);
  if (TREE_CODE (stmt) != BIND_EXPR)
    {
      if (!IS_EMPTY_STMT (stmt))
        {
          tree bindblock = poplevel (1, 0, 0);
          stmt = build3_v (BIND_EXPR, NULL, stmt, bindblock);
        }
      else
        poplevel (0, 0, 0);
    }
  else
    poplevel (0, 0, 0);

  if (IS_EMPTY_STMT (stmt) && !clauses->nowait)
    stmt = gfc_trans_omp_barrier ();

  ompws_flags = 0;
  return stmt;
}

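/* Entry point: dispatch an OpenMP directive to the matching translation
   routine above.  */
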
tree
gfc_trans_omp_directive (gfc_code *code)
{
  switch (code->op)
    {
    case EXEC_OMP_ATOMIC:
      return gfc_trans_omp_atomic (code);
    case EXEC_OMP_BARRIER:
      return gfc_trans_omp_barrier ();
    case EXEC_OMP_CRITICAL:
      return gfc_trans_omp_critical (code);
    case EXEC_OMP_DO:
      return gfc_trans_omp_do (code, NULL, code->ext.omp_clauses, NULL);
    case EXEC_OMP_FLUSH:
      return gfc_trans_omp_flush ();
    case EXEC_OMP_MASTER:
      return gfc_trans_omp_master (code);
    case EXEC_OMP_ORDERED:
      return gfc_trans_omp_ordered (code);
    case EXEC_OMP_PARALLEL:
      return gfc_trans_omp_parallel (code);
    case EXEC_OMP_PARALLEL_DO:
      return gfc_trans_omp_parallel_do (code);
    case EXEC_OMP_PARALLEL_SECTIONS:
      return gfc_trans_omp_parallel_sections (code);
    case EXEC_OMP_PARALLEL_WORKSHARE:
      return gfc_trans_omp_parallel_workshare (code);
    case EXEC_OMP_SECTIONS:
      return gfc_trans_omp_sections (code, code->ext.omp_clauses);
    case EXEC_OMP_SINGLE:
      return gfc_trans_omp_single (code, code->ext.omp_clauses);
    case EXEC_OMP_TASK:
      return gfc_trans_omp_task (code);
    case EXEC_OMP_TASKWAIT:
      return gfc_trans_omp_taskwait ();
    case EXEC_OMP_TASKYIELD:
      return gfc_trans_omp_taskyield ();
    case EXEC_OMP_WORKSHARE:
      return gfc_trans_omp_workshare (code, code->ext.omp_clauses);
    default:
      gcc_unreachable ();
    }
}